From 71705da53be1397ac29aad5f0f8af181fd2b5e52 Mon Sep 17 00:00:00 2001
From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com>
Date: Thu, 6 Feb 2020 09:08:04 -0800
Subject: [PATCH] chore: remove libraries for repo split (#10348)

Removes the following libraries:
* bigquery_datatransfer
* bigquery_storage
* dataproc
* logging
* monitoring
* phishingprotection
---
 README.rst | 22 +-
 bigquery_datatransfer/.coveragerc | 19 -
 bigquery_datatransfer/.flake8 | 14 -
 bigquery_datatransfer/.repo-metadata.json | 13 -
 bigquery_datatransfer/CHANGELOG.md | 132 -
 bigquery_datatransfer/LICENSE | 201 -
 bigquery_datatransfer/MANIFEST.in | 5 -
 bigquery_datatransfer/README.rst | 111 -
 bigquery_datatransfer/docs/README.rst | 1 -
 bigquery_datatransfer/docs/_static/custom.css | 4 -
 .../docs/_templates/layout.html | 50 -
 bigquery_datatransfer/docs/changelog.md | 1 -
 bigquery_datatransfer/docs/conf.py | 356 --
 bigquery_datatransfer/docs/gapic/v1/api.rst | 6 -
 bigquery_datatransfer/docs/gapic/v1/types.rst | 5 -
 bigquery_datatransfer/docs/index.rst | 22 -
 bigquery_datatransfer/google/__init__.py | 24 -
 .../google/cloud/__init__.py | 24 -
 .../google/cloud/bigquery_datatransfer.py | 25 -
 .../bigquery_datatransfer_v1/__init__.py | 41 -
 .../gapic/__init__.py | 0
 .../gapic/data_transfer_service_client.py | 1561 ------
 .../data_transfer_service_client_config.py | 93 -
 .../bigquery_datatransfer_v1/gapic/enums.py | 164 -
 .../gapic/transports/__init__.py | 0
 .../data_transfer_service_grpc_transport.py | 313 --
 .../proto/__init__.py | 0
 .../proto/datasource.proto | 542 --
 .../proto/datasource_pb2.py | 2221 --------
 .../proto/datasource_pb2_grpc.py | 229 -
 .../proto/datatransfer.proto | 838 ---
 .../proto/datatransfer_pb2.py | 3353 ------------
 .../proto/datatransfer_pb2_grpc.py | 296 --
 .../proto/transfer.proto | 285 -
 .../proto/transfer_pb2.py | 1268 -----
 .../proto/transfer_pb2_grpc.py | 2 -
 .../cloud/bigquery_datatransfer_v1/types.py | 61 -
 bigquery_datatransfer/noxfile.py | 179 -
 bigquery_datatransfer/samples/__init__.py | 0
 .../samples/create_scheduled_query.py | 97 -
 .../samples/requirements.txt | 1 -
 .../samples/tests/__init__.py | 0
 .../tests/test_create_scheduled_query.py | 82 -
 bigquery_datatransfer/setup.cfg | 3 -
 bigquery_datatransfer/setup.py | 86 -
 bigquery_datatransfer/synth.metadata | 189 -
 bigquery_datatransfer/synth.py | 68 -
 .../test_system_data_transfer_service_v1.py | 30 -
 .../test_data_transfer_service_client_v1.py | 731 ---
 bigquery_datatransfer/tests/unit/test_shim.py | 29 -
 bigquery_storage/.coveragerc | 19 -
 bigquery_storage/.flake8 | 14 -
 bigquery_storage/.gitignore | 1 -
 bigquery_storage/.repo-metadata.json | 13 -
 bigquery_storage/CHANGELOG.md | 140 -
 bigquery_storage/LICENSE | 201 -
 bigquery_storage/MANIFEST.in | 5 -
 bigquery_storage/README.rst | 102 -
 bigquery_storage/docs/README.rst | 1 -
 bigquery_storage/docs/_static/custom.css | 4 -
 bigquery_storage/docs/_templates/layout.html | 50 -
 bigquery_storage/docs/conf.py | 327 --
 bigquery_storage/docs/gapic/v1beta1/api.rst | 6 -
 .../docs/gapic/v1beta1/reader.rst | 6 -
 bigquery_storage/docs/gapic/v1beta1/types.rst | 5 -
 bigquery_storage/docs/index.rst | 19 -
 bigquery_storage/docs/samples | 1 -
 bigquery_storage/google/__init__.py | 24 -
 bigquery_storage/google/cloud/__init__.py | 24 -
 .../google/cloud/bigquery_storage.py | 25 -
 .../bigquery_storage_v1beta1/__init__.py | 43 -
 .../cloud/bigquery_storage_v1beta1/client.py | 126 -
 .../gapic/__init__.py | 0
 .../gapic/big_query_storage_client.py | 671 ---
 .../gapic/big_query_storage_client_config.py | 67
- .../bigquery_storage_v1beta1/gapic/enums.py | 58 - .../gapic/transports/__init__.py | 0 .../big_query_storage_grpc_transport.py | 226 - .../proto/__init__.py | 0 .../proto/arrow.proto | 37 - .../proto/arrow_pb2.py | 170 - .../proto/arrow_pb2_grpc.py | 2 - .../bigquery_storage_v1beta1/proto/avro.proto | 38 - .../proto/avro_pb2.py | 171 - .../proto/avro_pb2_grpc.py | 2 - .../proto/read_options.proto | 41 - .../proto/read_options_pb2.py | 123 - .../proto/read_options_pb2_grpc.py | 2 - .../proto/storage.proto | 405 -- .../proto/storage_pb2.py | 1716 ------- .../proto/storage_pb2_grpc.py | 168 - .../proto/table_reference.proto | 43 - .../proto/table_reference_pb2.py | 203 - .../proto/table_reference_pb2_grpc.py | 2 - .../cloud/bigquery_storage_v1beta1/reader.py | 644 --- .../cloud/bigquery_storage_v1beta1/types.py | 55 - bigquery_storage/noxfile.py | 182 - bigquery_storage/samples/__init__.py | 0 bigquery_storage/samples/quickstart.py | 100 - bigquery_storage/samples/requirements.txt | 1 - bigquery_storage/samples/tests/__init__.py | 0 .../samples/tests/quickstart_test.py | 44 - bigquery_storage/setup.cfg | 3 - bigquery_storage/setup.py | 81 - bigquery_storage/synth.metadata | 39 - bigquery_storage/synth.py | 143 - .../tests/system/assets/people_data.csv | 6 - bigquery_storage/tests/system/conftest.py | 233 - bigquery_storage/tests/system/test_reader.py | 456 -- .../tests/system/test_reader_dataframe.py | 93 - .../test_big_query_storage_client_v1beta1.py | 261 - bigquery_storage/tests/unit/test_client.py | 90 - bigquery_storage/tests/unit/test_reader.py | 863 ---- dataproc/.coveragerc | 19 - dataproc/.flake8 | 14 - dataproc/.repo-metadata.json | 13 - dataproc/CHANGELOG.md | 158 - dataproc/LICENSE | 201 - dataproc/MANIFEST.in | 5 - dataproc/README.rst | 110 - dataproc/docs/README.rst | 1 - dataproc/docs/_static/custom.css | 4 - dataproc/docs/_templates/layout.html | 50 - dataproc/docs/changelog.md | 1 - dataproc/docs/conf.py | 363 -- dataproc/docs/gapic/v1/api.rst | 6 - dataproc/docs/gapic/v1/types.rst | 5 - dataproc/docs/gapic/v1beta2/api.rst | 6 - dataproc/docs/gapic/v1beta2/types.rst | 5 - dataproc/docs/index.rst | 22 - dataproc/google/__init__.py | 24 - dataproc/google/cloud/__init__.py | 24 - dataproc/google/cloud/dataproc.py | 33 - dataproc/google/cloud/dataproc_v1/__init__.py | 61 - .../cloud/dataproc_v1/gapic/__init__.py | 0 .../gapic/cluster_controller_client.py | 869 ---- .../gapic/cluster_controller_client_config.py | 53 - .../google/cloud/dataproc_v1/gapic/enums.py | 277 - .../gapic/job_controller_client.py | 705 --- .../gapic/job_controller_client_config.py | 53 - .../dataproc_v1/gapic/transports/__init__.py | 0 .../cluster_controller_grpc_transport.py | 204 - .../job_controller_grpc_transport.py | 189 - ...orkflow_template_service_grpc_transport.py | 249 - .../gapic/workflow_template_service_client.py | 933 ---- ...workflow_template_service_client_config.py | 58 - .../cloud/dataproc_v1/proto/__init__.py | 0 .../proto/autoscaling_policies.proto | 340 -- .../proto/autoscaling_policies_pb2.py | 1209 ----- .../proto/autoscaling_policies_pb2_grpc.py | 124 - .../cloud/dataproc_v1/proto/clusters.proto | 856 --- .../cloud/dataproc_v1/proto/clusters_pb2.py | 3885 -------------- .../dataproc_v1/proto/clusters_pb2_grpc.py | 151 - .../google/cloud/dataproc_v1/proto/jobs.proto | 792 --- .../cloud/dataproc_v1/proto/jobs_pb2.py | 4335 ---------------- .../cloud/dataproc_v1/proto/jobs_pb2_grpc.py | 140 - .../cloud/dataproc_v1/proto/operations.proto | 84 - 
.../cloud/dataproc_v1/proto/operations_pb2.py | 485 -- .../dataproc_v1/proto/operations_pb2_grpc.py | 2 - .../cloud/dataproc_v1/proto/shared.proto | 43 - .../cloud/dataproc_v1/proto/shared_pb2.py | 81 - .../dataproc_v1/proto/shared_pb2_grpc.py | 2 - .../proto/workflow_templates.proto | 779 --- .../proto/workflow_templates_pb2.py | 3354 ------------ .../proto/workflow_templates_pb2_grpc.py | 203 - dataproc/google/cloud/dataproc_v1/types.py | 61 - .../google/cloud/dataproc_v1beta2/__init__.py | 70 - .../cloud/dataproc_v1beta2/gapic/__init__.py | 0 .../autoscaling_policy_service_client.py | 638 --- ...utoscaling_policy_service_client_config.py | 48 - .../gapic/cluster_controller_client.py | 881 ---- .../gapic/cluster_controller_client_config.py | 53 - .../cloud/dataproc_v1beta2/gapic/enums.py | 304 -- .../gapic/job_controller_client.py | 707 --- .../gapic/job_controller_client_config.py | 53 - .../gapic/transports/__init__.py | 0 ...toscaling_policy_service_grpc_transport.py | 179 - .../cluster_controller_grpc_transport.py | 204 - .../job_controller_grpc_transport.py | 189 - ...orkflow_template_service_grpc_transport.py | 249 - .../gapic/workflow_template_service_client.py | 946 ---- ...workflow_template_service_client_config.py | 58 - .../cloud/dataproc_v1beta2/proto/__init__.py | 0 .../proto/autoscaling_policies.proto | 351 -- .../proto/autoscaling_policies_pb2.py | 1212 ----- .../proto/autoscaling_policies_pb2_grpc.py | 124 - .../dataproc_v1beta2/proto/clusters.proto | 936 ---- .../dataproc_v1beta2/proto/clusters_pb2.py | 4434 ---------------- .../proto/clusters_pb2_grpc.py | 151 - .../cloud/dataproc_v1beta2/proto/jobs.proto | 829 --- .../cloud/dataproc_v1beta2/proto/jobs_pb2.py | 4571 ----------------- .../dataproc_v1beta2/proto/jobs_pb2_grpc.py | 140 - .../dataproc_v1beta2/proto/operations.proto | 83 - .../dataproc_v1beta2/proto/operations_pb2.py | 471 -- .../proto/operations_pb2_grpc.py | 2 - .../cloud/dataproc_v1beta2/proto/shared.proto | 55 - .../dataproc_v1beta2/proto/shared_pb2.py | 97 - .../dataproc_v1beta2/proto/shared_pb2_grpc.py | 2 - .../proto/workflow_templates.proto | 787 --- .../proto/workflow_templates_pb2.py | 3381 ------------ .../proto/workflow_templates_pb2_grpc.py | 203 - .../google/cloud/dataproc_v1beta2/types.py | 68 - dataproc/noxfile.py | 160 - dataproc/setup.cfg | 3 - dataproc/setup.py | 86 - dataproc/synth.metadata | 361 -- dataproc/synth.py | 87 - .../v1/test_system_cluster_controller_v1.py | 31 - .../test_system_cluster_controller_v1beta2.py | 31 - .../v1/test_cluster_controller_client_v1.py | 413 -- .../gapic/v1/test_job_controller_client_v1.py | 355 -- ...est_workflow_template_service_client_v1.py | 390 -- ...toscaling_policy_service_client_v1beta2.py | 281 - .../test_cluster_controller_client_v1beta2.py | 413 -- .../test_job_controller_client_v1beta2.py | 363 -- ...orkflow_template_service_client_v1beta2.py | 390 -- logging/.coveragerc | 19 - logging/.flake8 | 14 - logging/.repo-metadata.json | 12 - logging/CHANGELOG.md | 275 - logging/LICENSE | 202 - logging/MANIFEST.in | 5 - logging/README.rst | 113 - logging/docs/README.rst | 1 - logging/docs/_static/custom.css | 4 - logging/docs/_templates/layout.html | 50 - logging/docs/changelog.md | 1 - logging/docs/client.rst | 6 - logging/docs/conf.py | 359 -- logging/docs/entries.rst | 7 - logging/docs/gapic/v2/api.rst | 6 - logging/docs/gapic/v2/types.rst | 5 - logging/docs/handlers-app-engine.rst | 6 - logging/docs/handlers-container-engine.rst | 6 - logging/docs/handlers.rst | 6 - logging/docs/index.rst | 19 - 
logging/docs/logger.rst | 6 - logging/docs/metric.rst | 6 - logging/docs/sink.rst | 6 - logging/docs/snippets.py | 434 -- logging/docs/stdlib-usage.rst | 70 - logging/docs/transports-base.rst | 6 - logging/docs/transports-sync.rst | 6 - logging/docs/transports-thread.rst | 7 - logging/docs/usage.rst | 361 -- logging/docs/v1.rst | 18 - logging/docs/v2.rst | 7 - logging/google/__init__.py | 22 - logging/google/cloud/__init__.py | 22 - logging/google/cloud/logging/__init__.py | 30 - logging/google/cloud/logging/_gapic.py | 574 --- logging/google/cloud/logging/_helpers.py | 125 - logging/google/cloud/logging/_http.py | 540 -- logging/google/cloud/logging/client.py | 400 -- logging/google/cloud/logging/entries.py | 371 -- .../google/cloud/logging/handlers/__init__.py | 27 - .../google/cloud/logging/handlers/_helpers.py | 142 - .../cloud/logging/handlers/app_engine.py | 124 - .../logging/handlers/container_engine.py | 54 - .../google/cloud/logging/handlers/handlers.py | 156 - .../logging/handlers/middleware/__init__.py | 17 - .../logging/handlers/middleware/request.py | 57 - .../logging/handlers/transports/__init__.py | 29 - .../handlers/transports/background_thread.py | 353 -- .../cloud/logging/handlers/transports/base.py | 49 - .../cloud/logging/handlers/transports/sync.py | 59 - logging/google/cloud/logging/logger.py | 384 -- logging/google/cloud/logging/metric.py | 185 - logging/google/cloud/logging/resource.py | 53 - logging/google/cloud/logging/sink.py | 220 - logging/google/cloud/logging_v2/__init__.py | 45 - .../google/cloud/logging_v2/gapic/__init__.py | 0 .../gapic/config_service_v2_client.py | 1297 ----- .../gapic/config_service_v2_client_config.py | 82 - .../google/cloud/logging_v2/gapic/enums.py | 215 - .../gapic/logging_service_v2_client.py | 846 --- .../gapic/logging_service_v2_client_config.py | 62 - .../gapic/metrics_service_v2_client.py | 674 --- .../gapic/metrics_service_v2_client_config.py | 48 - .../logging_v2/gapic/transports/__init__.py | 0 .../config_service_v2_grpc_transport.py | 257 - .../logging_service_v2_grpc_transport.py | 192 - .../metrics_service_v2_grpc_transport.py | 181 - .../google/cloud/logging_v2/proto/__init__.py | 0 .../cloud/logging_v2/proto/log_entry.proto | 207 - .../cloud/logging_v2/proto/log_entry_pb2.py | 873 ---- .../logging_v2/proto/log_entry_pb2_grpc.py | 2 - .../cloud/logging_v2/proto/logging.proto | 357 -- .../logging_v2/proto/logging_config.proto | 676 --- .../logging_v2/proto/logging_config_pb2.py | 1905 ------- .../proto/logging_config_pb2_grpc.py | 213 - .../logging_v2/proto/logging_metrics.proto | 282 - .../logging_v2/proto/logging_metrics_pb2.py | 1018 ---- .../proto/logging_metrics_pb2_grpc.py | 118 - .../cloud/logging_v2/proto/logging_pb2.py | 1312 ----- .../logging_v2/proto/logging_pb2_grpc.py | 130 - logging/google/cloud/logging_v2/types.py | 72 - logging/noxfile.py | 189 - logging/pylint.config.py | 25 - logging/setup.cfg | 3 - logging/setup.py | 90 - logging/synth.metadata | 39 - logging/synth.py | 48 - logging/tests/__init__.py | 0 .../v2/test_system_logging_service_v2_v2.py | 36 - logging/tests/system/test_system.py | 539 -- logging/tests/unit/__init__.py | 13 - .../v2/test_config_service_v2_client_v2.py | 516 -- .../v2/test_logging_service_v2_client_v2.py | 262 - .../v2/test_metrics_service_v2_client_v2.py | 288 -- logging/tests/unit/handlers/__init__.py | 13 - .../unit/handlers/middleware/test_request.py | 84 - logging/tests/unit/handlers/test__helpers.py | 219 - .../tests/unit/handlers/test_app_engine.py | 142 - 
.../unit/handlers/test_container_engine.py | 58 - logging/tests/unit/handlers/test_handlers.py | 152 - .../unit/handlers/transports/__init__.py | 13 - .../transports/test_background_thread.py | 536 -- .../unit/handlers/transports/test_base.py | 38 - .../unit/handlers/transports/test_sync.py | 107 - logging/tests/unit/test__gapic.py | 615 --- logging/tests/unit/test__helpers.py | 179 - logging/tests/unit/test__http.py | 852 --- logging/tests/unit/test_client.py | 738 --- logging/tests/unit/test_entries.py | 735 --- logging/tests/unit/test_logger.py | 1140 ---- logging/tests/unit/test_metric.py | 247 - logging/tests/unit/test_sink.py | 322 -- monitoring/.coveragerc | 19 - monitoring/.flake8 | 14 - monitoring/.repo-metadata.json | 12 - monitoring/CHANGELOG.md | 200 - monitoring/LICENSE | 201 - monitoring/MANIFEST.in | 5 - monitoring/README.rst | 114 - monitoring/docs/README.rst | 1 - monitoring/docs/_static/custom.css | 4 - monitoring/docs/_templates/layout.html | 50 - monitoring/docs/changelog.md | 1 - monitoring/docs/conf.py | 363 -- monitoring/docs/gapic/v3/api.rst | 6 - monitoring/docs/gapic/v3/types.rst | 5 - monitoring/docs/index.rst | 20 - monitoring/docs/query.rst | 6 - monitoring/google/__init__.py | 24 - monitoring/google/cloud/__init__.py | 24 - monitoring/google/cloud/monitoring.py | 39 - .../google/cloud/monitoring_v3/__init__.py | 86 - .../google/cloud/monitoring_v3/_dataframe.py | 147 - .../cloud/monitoring_v3/gapic/__init__.py | 0 .../gapic/alert_policy_service_client.py | 701 --- .../alert_policy_service_client_config.py | 48 - .../google/cloud/monitoring_v3/gapic/enums.py | 612 --- .../gapic/group_service_client.py | 797 --- .../gapic/group_service_client_config.py | 53 - .../gapic/metric_service_client.py | 1025 ---- .../gapic/metric_service_client_config.py | 63 - .../notification_channel_service_client.py | 1176 ----- ...ification_channel_service_client_config.py | 73 - .../service_monitoring_service_client.py | 1127 ---- ...ervice_monitoring_service_client_config.py | 73 - .../gapic/transports/__init__.py | 0 .../alert_policy_service_grpc_transport.py | 183 - .../group_service_grpc_transport.py | 192 - .../metric_service_grpc_transport.py | 223 - ...fication_channel_service_grpc_transport.py | 292 -- ...rvice_monitoring_service_grpc_transport.py | 251 - .../uptime_check_service_grpc_transport.py | 199 - .../gapic/uptime_check_service_client.py | 758 --- .../uptime_check_service_client_config.py | 53 - .../cloud/monitoring_v3/proto/__init__.py | 0 .../cloud/monitoring_v3/proto/alert.proto | 339 -- .../cloud/monitoring_v3/proto/alert_pb2.py | 1276 ----- .../monitoring_v3/proto/alert_pb2_grpc.py | 2 - .../monitoring_v3/proto/alert_service.proto | 204 - .../monitoring_v3/proto/alert_service_pb2.py | 684 --- .../proto/alert_service_pb2_grpc.py | 140 - .../cloud/monitoring_v3/proto/common.proto | 432 -- .../cloud/monitoring_v3/proto/common_pb2.py | 792 --- .../monitoring_v3/proto/common_pb2_grpc.py | 2 - .../monitoring_v3/proto/dropped_labels.proto | 45 - .../monitoring_v3/proto/dropped_labels_pb2.py | 176 - .../proto/dropped_labels_pb2_grpc.py | 2 - .../cloud/monitoring_v3/proto/group.proto | 76 - .../cloud/monitoring_v3/proto/group_pb2.py | 209 - .../monitoring_v3/proto/group_pb2_grpc.py | 2 - .../monitoring_v3/proto/group_service.proto | 234 - .../monitoring_v3/proto/group_service_pb2.py | 1022 ---- .../proto/group_service_pb2_grpc.py | 161 - .../cloud/monitoring_v3/proto/metric.proto | 96 - .../cloud/monitoring_v3/proto/metric_pb2.py | 353 -- 
.../monitoring_v3/proto/metric_pb2_grpc.py | 2 - .../monitoring_v3/proto/metric_service.proto | 341 -- .../monitoring_v3/proto/metric_service_pb2.py | 1613 ------ .../proto/metric_service_pb2_grpc.py | 181 - .../monitoring_v3/proto/mutation_record.proto | 36 - .../proto/mutation_record_pb2.py | 119 - .../proto/mutation_record_pb2_grpc.py | 2 - .../monitoring_v3/proto/notification.proto | 163 - .../monitoring_v3/proto/notification_pb2.py | 659 --- .../proto/notification_pb2_grpc.py | 2 - .../proto/notification_service.proto | 344 -- .../proto/notification_service_pb2.py | 1350 ----- .../proto/notification_service_pb2_grpc.py | 239 - .../cloud/monitoring_v3/proto/service.proto | 379 -- .../cloud/monitoring_v3/proto/service_pb2.py | 2072 -------- .../monitoring_v3/proto/service_pb2_grpc.py | 2 - .../monitoring_v3/proto/service_service.proto | 285 - .../proto/service_service_pb2.py | 1307 ----- .../proto/service_service_pb2_grpc.py | 212 - .../monitoring_v3/proto/span_context.proto | 43 - .../monitoring_v3/proto/span_context_pb2.py | 102 - .../proto/span_context_pb2_grpc.py | 2 - .../cloud/monitoring_v3/proto/uptime.proto | 341 -- .../cloud/monitoring_v3/proto/uptime_pb2.py | 1450 ------ .../monitoring_v3/proto/uptime_pb2_grpc.py | 2 - .../monitoring_v3/proto/uptime_service.proto | 210 - .../monitoring_v3/proto/uptime_service_pb2.py | 840 --- .../proto/uptime_service_pb2_grpc.py | 158 - .../google/cloud/monitoring_v3/query.py | 625 --- .../google/cloud/monitoring_v3/types.py | 100 - monitoring/noxfile.py | 160 - monitoring/setup.cfg | 3 - monitoring/setup.py | 86 - monitoring/synth.metadata | 369 -- monitoring/synth.py | 99 - monitoring/tests/__init__.py | 0 .../gapic/v3/test_system_metric_service_v3.py | 30 - monitoring/tests/system/test_vpcsc_v3.py | 675 --- .../v3/test_alert_policy_service_client_v3.py | 265 - .../gapic/v3/test_group_service_client_v3.py | 334 -- .../gapic/v3/test_metric_service_client_v3.py | 441 -- ..._notification_channel_service_client_v3.py | 521 -- ...st_service_monitoring_service_client_v3.py | 481 -- .../v3/test_uptime_check_service_client_v3.py | 326 -- monitoring/tests/unit/test__dataframe.py | 228 - monitoring/tests/unit/test_query.py | 576 --- phishingprotection/.coveragerc | 19 - phishingprotection/.flake8 | 14 - phishingprotection/.repo-metadata.json | 13 - phishingprotection/CHANGELOG.md | 41 - phishingprotection/LICENSE | 201 - phishingprotection/MANIFEST.in | 5 - phishingprotection/README.rst | 77 - phishingprotection/docs/README.rst | 1 - phishingprotection/docs/_static/custom.css | 4 - .../docs/_templates/layout.html | 50 - phishingprotection/docs/changelog.md | 1 - phishingprotection/docs/conf.py | 363 -- phishingprotection/docs/gapic/v1beta1/api.rst | 6 - .../docs/gapic/v1beta1/types.rst | 5 - phishingprotection/docs/index.rst | 20 - phishingprotection/google/__init__.py | 24 - phishingprotection/google/cloud/__init__.py | 24 - .../google/cloud/phishingprotection.py | 24 - .../phishingprotection_v1beta1/__init__.py | 43 - .../gapic/__init__.py | 0 .../phishing_protection_service_client.py | 279 - ...ishing_protection_service_client_config.py | 28 - .../gapic/transports/__init__.py | 0 ...shing_protection_service_grpc_transport.py | 133 - .../proto/__init__.py | 0 .../proto/phishingprotection.proto | 72 - .../proto/phishingprotection_pb2.py | 195 - .../proto/phishingprotection_pb2_grpc.py | 56 - .../cloud/phishingprotection_v1beta1/types.py | 43 - phishingprotection/noxfile.py | 160 - phishingprotection/setup.cfg | 3 - phishingprotection/setup.py | 74 - 
phishingprotection/synth.metadata | 147 - phishingprotection/synth.py | 96 - ...shing_protection_service_client_v1beta1.py | 104 - 472 files changed, 16 insertions(+), 130782 deletions(-) delete mode 100644 bigquery_datatransfer/.coveragerc delete mode 100644 bigquery_datatransfer/.flake8 delete mode 100644 bigquery_datatransfer/.repo-metadata.json delete mode 100644 bigquery_datatransfer/CHANGELOG.md delete mode 100644 bigquery_datatransfer/LICENSE delete mode 100644 bigquery_datatransfer/MANIFEST.in delete mode 100644 bigquery_datatransfer/README.rst delete mode 120000 bigquery_datatransfer/docs/README.rst delete mode 100644 bigquery_datatransfer/docs/_static/custom.css delete mode 100644 bigquery_datatransfer/docs/_templates/layout.html delete mode 120000 bigquery_datatransfer/docs/changelog.md delete mode 100644 bigquery_datatransfer/docs/conf.py delete mode 100644 bigquery_datatransfer/docs/gapic/v1/api.rst delete mode 100644 bigquery_datatransfer/docs/gapic/v1/types.rst delete mode 100644 bigquery_datatransfer/docs/index.rst delete mode 100644 bigquery_datatransfer/google/__init__.py delete mode 100644 bigquery_datatransfer/google/cloud/__init__.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/__init__.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/enums.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/__init__.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/__init__.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource.proto delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2_grpc.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2_grpc.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2_grpc.py delete mode 100644 bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/types.py delete mode 100644 bigquery_datatransfer/noxfile.py delete mode 100644 bigquery_datatransfer/samples/__init__.py delete mode 100644 bigquery_datatransfer/samples/create_scheduled_query.py delete mode 100644 bigquery_datatransfer/samples/requirements.txt delete mode 100644 bigquery_datatransfer/samples/tests/__init__.py delete mode 100644 bigquery_datatransfer/samples/tests/test_create_scheduled_query.py 
delete mode 100644 bigquery_datatransfer/setup.cfg delete mode 100644 bigquery_datatransfer/setup.py delete mode 100644 bigquery_datatransfer/synth.metadata delete mode 100644 bigquery_datatransfer/synth.py delete mode 100644 bigquery_datatransfer/tests/system/gapic/v1/test_system_data_transfer_service_v1.py delete mode 100644 bigquery_datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py delete mode 100644 bigquery_datatransfer/tests/unit/test_shim.py delete mode 100644 bigquery_storage/.coveragerc delete mode 100644 bigquery_storage/.flake8 delete mode 100644 bigquery_storage/.gitignore delete mode 100644 bigquery_storage/.repo-metadata.json delete mode 100644 bigquery_storage/CHANGELOG.md delete mode 100644 bigquery_storage/LICENSE delete mode 100644 bigquery_storage/MANIFEST.in delete mode 100644 bigquery_storage/README.rst delete mode 120000 bigquery_storage/docs/README.rst delete mode 100644 bigquery_storage/docs/_static/custom.css delete mode 100644 bigquery_storage/docs/_templates/layout.html delete mode 100644 bigquery_storage/docs/conf.py delete mode 100644 bigquery_storage/docs/gapic/v1beta1/api.rst delete mode 100644 bigquery_storage/docs/gapic/v1beta1/reader.rst delete mode 100644 bigquery_storage/docs/gapic/v1beta1/types.rst delete mode 100644 bigquery_storage/docs/index.rst delete mode 120000 bigquery_storage/docs/samples delete mode 100644 bigquery_storage/google/__init__.py delete mode 100644 bigquery_storage/google/cloud/__init__.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/__init__.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/client.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/__init__.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client_config.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/enums.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/__init__.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/__init__.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/arrow.proto delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/arrow_pb2.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/arrow_pb2_grpc.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/avro.proto delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/avro_pb2.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/avro_pb2_grpc.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/read_options.proto delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/read_options_pb2.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/read_options_pb2_grpc.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage.proto delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py delete mode 100644 
bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2_grpc.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference.proto delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2_grpc.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/reader.py delete mode 100644 bigquery_storage/google/cloud/bigquery_storage_v1beta1/types.py delete mode 100644 bigquery_storage/noxfile.py delete mode 100644 bigquery_storage/samples/__init__.py delete mode 100644 bigquery_storage/samples/quickstart.py delete mode 100644 bigquery_storage/samples/requirements.txt delete mode 100644 bigquery_storage/samples/tests/__init__.py delete mode 100644 bigquery_storage/samples/tests/quickstart_test.py delete mode 100644 bigquery_storage/setup.cfg delete mode 100644 bigquery_storage/setup.py delete mode 100644 bigquery_storage/synth.metadata delete mode 100644 bigquery_storage/synth.py delete mode 100644 bigquery_storage/tests/system/assets/people_data.csv delete mode 100644 bigquery_storage/tests/system/conftest.py delete mode 100644 bigquery_storage/tests/system/test_reader.py delete mode 100644 bigquery_storage/tests/system/test_reader_dataframe.py delete mode 100644 bigquery_storage/tests/unit/gapic/v1beta1/test_big_query_storage_client_v1beta1.py delete mode 100644 bigquery_storage/tests/unit/test_client.py delete mode 100644 bigquery_storage/tests/unit/test_reader.py delete mode 100644 dataproc/.coveragerc delete mode 100644 dataproc/.flake8 delete mode 100644 dataproc/.repo-metadata.json delete mode 100644 dataproc/CHANGELOG.md delete mode 100644 dataproc/LICENSE delete mode 100644 dataproc/MANIFEST.in delete mode 100644 dataproc/README.rst delete mode 120000 dataproc/docs/README.rst delete mode 100644 dataproc/docs/_static/custom.css delete mode 100644 dataproc/docs/_templates/layout.html delete mode 120000 dataproc/docs/changelog.md delete mode 100644 dataproc/docs/conf.py delete mode 100644 dataproc/docs/gapic/v1/api.rst delete mode 100644 dataproc/docs/gapic/v1/types.rst delete mode 100644 dataproc/docs/gapic/v1beta2/api.rst delete mode 100644 dataproc/docs/gapic/v1beta2/types.rst delete mode 100644 dataproc/docs/index.rst delete mode 100644 dataproc/google/__init__.py delete mode 100644 dataproc/google/cloud/__init__.py delete mode 100644 dataproc/google/cloud/dataproc.py delete mode 100644 dataproc/google/cloud/dataproc_v1/__init__.py delete mode 100644 dataproc/google/cloud/dataproc_v1/gapic/__init__.py delete mode 100644 dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client.py delete mode 100644 dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client_config.py delete mode 100644 dataproc/google/cloud/dataproc_v1/gapic/enums.py delete mode 100644 dataproc/google/cloud/dataproc_v1/gapic/job_controller_client.py delete mode 100644 dataproc/google/cloud/dataproc_v1/gapic/job_controller_client_config.py delete mode 100644 dataproc/google/cloud/dataproc_v1/gapic/transports/__init__.py delete mode 100644 dataproc/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py delete mode 100644 dataproc/google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py delete mode 100644 dataproc/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py delete mode 100644 
dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py delete mode 100644 dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client_config.py delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/__init__.py delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies.proto delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2_grpc.py delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/clusters.proto delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/clusters_pb2.py delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/jobs.proto delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/jobs_pb2.py delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/operations.proto delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/operations_pb2.py delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/operations_pb2_grpc.py delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/shared.proto delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/shared_pb2.py delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/shared_pb2_grpc.py delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/workflow_templates.proto delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py delete mode 100644 dataproc/google/cloud/dataproc_v1/proto/workflow_templates_pb2_grpc.py delete mode 100644 dataproc/google/cloud/dataproc_v1/types.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/__init__.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/__init__.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client_config.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client_config.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/enums.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/job_controller_client_config.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/transports/__init__.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client_config.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/__init__.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto delete mode 100644 
dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2_grpc.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/clusters.proto delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/jobs.proto delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/operations.proto delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2_grpc.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/shared.proto delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/shared_pb2.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/shared_pb2_grpc.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py delete mode 100644 dataproc/google/cloud/dataproc_v1beta2/types.py delete mode 100644 dataproc/noxfile.py delete mode 100644 dataproc/setup.cfg delete mode 100644 dataproc/setup.py delete mode 100644 dataproc/synth.metadata delete mode 100644 dataproc/synth.py delete mode 100644 dataproc/tests/system/gapic/v1/test_system_cluster_controller_v1.py delete mode 100644 dataproc/tests/system/gapic/v1beta2/test_system_cluster_controller_v1beta2.py delete mode 100644 dataproc/tests/unit/gapic/v1/test_cluster_controller_client_v1.py delete mode 100644 dataproc/tests/unit/gapic/v1/test_job_controller_client_v1.py delete mode 100644 dataproc/tests/unit/gapic/v1/test_workflow_template_service_client_v1.py delete mode 100644 dataproc/tests/unit/gapic/v1beta2/test_autoscaling_policy_service_client_v1beta2.py delete mode 100644 dataproc/tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py delete mode 100644 dataproc/tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py delete mode 100644 dataproc/tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py delete mode 100644 logging/.coveragerc delete mode 100644 logging/.flake8 delete mode 100644 logging/.repo-metadata.json delete mode 100644 logging/CHANGELOG.md delete mode 100644 logging/LICENSE delete mode 100644 logging/MANIFEST.in delete mode 100644 logging/README.rst delete mode 120000 logging/docs/README.rst delete mode 100644 logging/docs/_static/custom.css delete mode 100644 logging/docs/_templates/layout.html delete mode 120000 logging/docs/changelog.md delete mode 100644 logging/docs/client.rst delete mode 100644 logging/docs/conf.py delete mode 100644 logging/docs/entries.rst delete mode 100644 logging/docs/gapic/v2/api.rst delete mode 100644 logging/docs/gapic/v2/types.rst delete mode 100644 logging/docs/handlers-app-engine.rst delete mode 100644 logging/docs/handlers-container-engine.rst delete mode 100644 logging/docs/handlers.rst delete mode 100644 logging/docs/index.rst delete mode 100644 logging/docs/logger.rst delete mode 100644 logging/docs/metric.rst delete mode 100644 logging/docs/sink.rst delete mode 100644 
logging/docs/snippets.py delete mode 100644 logging/docs/stdlib-usage.rst delete mode 100644 logging/docs/transports-base.rst delete mode 100644 logging/docs/transports-sync.rst delete mode 100644 logging/docs/transports-thread.rst delete mode 100644 logging/docs/usage.rst delete mode 100644 logging/docs/v1.rst delete mode 100644 logging/docs/v2.rst delete mode 100644 logging/google/__init__.py delete mode 100644 logging/google/cloud/__init__.py delete mode 100644 logging/google/cloud/logging/__init__.py delete mode 100644 logging/google/cloud/logging/_gapic.py delete mode 100644 logging/google/cloud/logging/_helpers.py delete mode 100644 logging/google/cloud/logging/_http.py delete mode 100644 logging/google/cloud/logging/client.py delete mode 100644 logging/google/cloud/logging/entries.py delete mode 100644 logging/google/cloud/logging/handlers/__init__.py delete mode 100644 logging/google/cloud/logging/handlers/_helpers.py delete mode 100644 logging/google/cloud/logging/handlers/app_engine.py delete mode 100644 logging/google/cloud/logging/handlers/container_engine.py delete mode 100644 logging/google/cloud/logging/handlers/handlers.py delete mode 100644 logging/google/cloud/logging/handlers/middleware/__init__.py delete mode 100644 logging/google/cloud/logging/handlers/middleware/request.py delete mode 100644 logging/google/cloud/logging/handlers/transports/__init__.py delete mode 100644 logging/google/cloud/logging/handlers/transports/background_thread.py delete mode 100644 logging/google/cloud/logging/handlers/transports/base.py delete mode 100644 logging/google/cloud/logging/handlers/transports/sync.py delete mode 100644 logging/google/cloud/logging/logger.py delete mode 100644 logging/google/cloud/logging/metric.py delete mode 100644 logging/google/cloud/logging/resource.py delete mode 100644 logging/google/cloud/logging/sink.py delete mode 100644 logging/google/cloud/logging_v2/__init__.py delete mode 100644 logging/google/cloud/logging_v2/gapic/__init__.py delete mode 100644 logging/google/cloud/logging_v2/gapic/config_service_v2_client.py delete mode 100644 logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py delete mode 100644 logging/google/cloud/logging_v2/gapic/enums.py delete mode 100644 logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py delete mode 100644 logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py delete mode 100644 logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py delete mode 100644 logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py delete mode 100644 logging/google/cloud/logging_v2/gapic/transports/__init__.py delete mode 100644 logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py delete mode 100644 logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py delete mode 100644 logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py delete mode 100644 logging/google/cloud/logging_v2/proto/__init__.py delete mode 100644 logging/google/cloud/logging_v2/proto/log_entry.proto delete mode 100644 logging/google/cloud/logging_v2/proto/log_entry_pb2.py delete mode 100644 logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py delete mode 100644 logging/google/cloud/logging_v2/proto/logging.proto delete mode 100644 logging/google/cloud/logging_v2/proto/logging_config.proto delete mode 100644 logging/google/cloud/logging_v2/proto/logging_config_pb2.py delete mode 100644 
logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py delete mode 100644 logging/google/cloud/logging_v2/proto/logging_metrics.proto delete mode 100644 logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py delete mode 100644 logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py delete mode 100644 logging/google/cloud/logging_v2/proto/logging_pb2.py delete mode 100644 logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py delete mode 100644 logging/google/cloud/logging_v2/types.py delete mode 100644 logging/noxfile.py delete mode 100644 logging/pylint.config.py delete mode 100644 logging/setup.cfg delete mode 100644 logging/setup.py delete mode 100644 logging/synth.metadata delete mode 100644 logging/synth.py delete mode 100644 logging/tests/__init__.py delete mode 100644 logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py delete mode 100644 logging/tests/system/test_system.py delete mode 100644 logging/tests/unit/__init__.py delete mode 100644 logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py delete mode 100644 logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py delete mode 100644 logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py delete mode 100644 logging/tests/unit/handlers/__init__.py delete mode 100644 logging/tests/unit/handlers/middleware/test_request.py delete mode 100644 logging/tests/unit/handlers/test__helpers.py delete mode 100644 logging/tests/unit/handlers/test_app_engine.py delete mode 100644 logging/tests/unit/handlers/test_container_engine.py delete mode 100644 logging/tests/unit/handlers/test_handlers.py delete mode 100644 logging/tests/unit/handlers/transports/__init__.py delete mode 100644 logging/tests/unit/handlers/transports/test_background_thread.py delete mode 100644 logging/tests/unit/handlers/transports/test_base.py delete mode 100644 logging/tests/unit/handlers/transports/test_sync.py delete mode 100644 logging/tests/unit/test__gapic.py delete mode 100644 logging/tests/unit/test__helpers.py delete mode 100644 logging/tests/unit/test__http.py delete mode 100644 logging/tests/unit/test_client.py delete mode 100644 logging/tests/unit/test_entries.py delete mode 100644 logging/tests/unit/test_logger.py delete mode 100644 logging/tests/unit/test_metric.py delete mode 100644 logging/tests/unit/test_sink.py delete mode 100644 monitoring/.coveragerc delete mode 100644 monitoring/.flake8 delete mode 100644 monitoring/.repo-metadata.json delete mode 100644 monitoring/CHANGELOG.md delete mode 100644 monitoring/LICENSE delete mode 100644 monitoring/MANIFEST.in delete mode 100644 monitoring/README.rst delete mode 120000 monitoring/docs/README.rst delete mode 100644 monitoring/docs/_static/custom.css delete mode 100644 monitoring/docs/_templates/layout.html delete mode 120000 monitoring/docs/changelog.md delete mode 100644 monitoring/docs/conf.py delete mode 100644 monitoring/docs/gapic/v3/api.rst delete mode 100644 monitoring/docs/gapic/v3/types.rst delete mode 100644 monitoring/docs/index.rst delete mode 100644 monitoring/docs/query.rst delete mode 100644 monitoring/google/__init__.py delete mode 100644 monitoring/google/cloud/__init__.py delete mode 100644 monitoring/google/cloud/monitoring.py delete mode 100644 monitoring/google/cloud/monitoring_v3/__init__.py delete mode 100644 monitoring/google/cloud/monitoring_v3/_dataframe.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/__init__.py delete mode 100644 
monitoring/google/cloud/monitoring_v3/gapic/alert_policy_service_client.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/alert_policy_service_client_config.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/enums.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/group_service_client.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/group_service_client_config.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/metric_service_client.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/metric_service_client_config.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/notification_channel_service_client.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/notification_channel_service_client_config.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/service_monitoring_service_client.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/service_monitoring_service_client_config.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/transports/__init__.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/transports/alert_policy_service_grpc_transport.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/transports/group_service_grpc_transport.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/transports/metric_service_grpc_transport.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/transports/notification_channel_service_grpc_transport.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/transports/service_monitoring_service_grpc_transport.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/transports/uptime_check_service_grpc_transport.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/uptime_check_service_client.py delete mode 100644 monitoring/google/cloud/monitoring_v3/gapic/uptime_check_service_client_config.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/__init__.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/alert.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/alert_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/alert_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/alert_service.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/alert_service_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/alert_service_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/common.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/common_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/common_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/dropped_labels.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/dropped_labels_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/dropped_labels_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/group.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/group_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/group_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/group_service.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/group_service_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/group_service_pb2_grpc.py delete mode 100644 
monitoring/google/cloud/monitoring_v3/proto/metric.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/metric_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/metric_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/metric_service.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/metric_service_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/metric_service_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/mutation_record.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/mutation_record_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/mutation_record_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/notification.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/notification_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/notification_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/notification_service.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/notification_service_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/notification_service_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/service.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/service_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/service_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/service_service.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/service_service_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/service_service_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/span_context.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/span_context_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/span_context_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/uptime.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/uptime_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/uptime_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/uptime_service.proto delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/uptime_service_pb2.py delete mode 100644 monitoring/google/cloud/monitoring_v3/proto/uptime_service_pb2_grpc.py delete mode 100644 monitoring/google/cloud/monitoring_v3/query.py delete mode 100644 monitoring/google/cloud/monitoring_v3/types.py delete mode 100644 monitoring/noxfile.py delete mode 100644 monitoring/setup.cfg delete mode 100644 monitoring/setup.py delete mode 100644 monitoring/synth.metadata delete mode 100644 monitoring/synth.py delete mode 100644 monitoring/tests/__init__.py delete mode 100644 monitoring/tests/system/gapic/v3/test_system_metric_service_v3.py delete mode 100644 monitoring/tests/system/test_vpcsc_v3.py delete mode 100644 monitoring/tests/unit/gapic/v3/test_alert_policy_service_client_v3.py delete mode 100644 monitoring/tests/unit/gapic/v3/test_group_service_client_v3.py delete mode 100644 monitoring/tests/unit/gapic/v3/test_metric_service_client_v3.py delete mode 100644 monitoring/tests/unit/gapic/v3/test_notification_channel_service_client_v3.py delete mode 100644 monitoring/tests/unit/gapic/v3/test_service_monitoring_service_client_v3.py delete mode 100644 
monitoring/tests/unit/gapic/v3/test_uptime_check_service_client_v3.py delete mode 100644 monitoring/tests/unit/test__dataframe.py delete mode 100644 monitoring/tests/unit/test_query.py delete mode 100644 phishingprotection/.coveragerc delete mode 100644 phishingprotection/.flake8 delete mode 100644 phishingprotection/.repo-metadata.json delete mode 100644 phishingprotection/CHANGELOG.md delete mode 100644 phishingprotection/LICENSE delete mode 100644 phishingprotection/MANIFEST.in delete mode 100644 phishingprotection/README.rst delete mode 120000 phishingprotection/docs/README.rst delete mode 100644 phishingprotection/docs/_static/custom.css delete mode 100644 phishingprotection/docs/_templates/layout.html delete mode 120000 phishingprotection/docs/changelog.md delete mode 100644 phishingprotection/docs/conf.py delete mode 100644 phishingprotection/docs/gapic/v1beta1/api.rst delete mode 100644 phishingprotection/docs/gapic/v1beta1/types.rst delete mode 100644 phishingprotection/docs/index.rst delete mode 100644 phishingprotection/google/__init__.py delete mode 100644 phishingprotection/google/cloud/__init__.py delete mode 100644 phishingprotection/google/cloud/phishingprotection.py delete mode 100644 phishingprotection/google/cloud/phishingprotection_v1beta1/__init__.py delete mode 100644 phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/__init__.py delete mode 100644 phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/phishing_protection_service_client.py delete mode 100644 phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/phishing_protection_service_client_config.py delete mode 100644 phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/transports/__init__.py delete mode 100644 phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/transports/phishing_protection_service_grpc_transport.py delete mode 100644 phishingprotection/google/cloud/phishingprotection_v1beta1/proto/__init__.py delete mode 100644 phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection.proto delete mode 100644 phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2.py delete mode 100644 phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2_grpc.py delete mode 100644 phishingprotection/google/cloud/phishingprotection_v1beta1/types.py delete mode 100644 phishingprotection/noxfile.py delete mode 100644 phishingprotection/setup.cfg delete mode 100644 phishingprotection/setup.py delete mode 100644 phishingprotection/synth.metadata delete mode 100644 phishingprotection/synth.py delete mode 100644 phishingprotection/tests/unit/gapic/v1beta1/test_phishing_protection_service_client_v1beta1.py diff --git a/README.rst b/README.rst index 34b1cfaafe1f..506863cdd4c3 100644 --- a/README.rst +++ b/README.rst @@ -91,7 +91,7 @@ The following client libraries have **GA** support: .. _Scheduler Documentation: https://googleapis.dev/python/cloudscheduler/latest .. _Stackdriver Logging: https://pypi.org/project/google-cloud-logging/ -.. _Logging README: https://github.com/googleapis/google-cloud-python/tree/master/logging +.. _Logging README: https://github.com/googleapis/python-logging .. _Logging Documentation: https://googleapis.dev/python/logging/latest Beta Support @@ -150,7 +150,8 @@ updates. See `versioning`_ for more details. 
The following client libraries have **alpha** support: - `Google Cloud AutoML`_ (`AutoML README`_, `AutoML Documentation`_) -- `Google BigQuery Data Transfer`_ (`BigQuery Data Transfer README`_, `BigQuery Documentation`_) +- `Google BigQuery Data Transfer`_ (`BigQuery Data Transfer README`_, `BigQuery Data Transfer Documentation`_) +- `Google BigQuery Storage`_ (`BigQuery Storage README`_, `BigQuery Storage Documentation`) - `Google Cloud Bigtable - HappyBase`_ (`HappyBase README`_, `HappyBase Documentation`_) - `Google Cloud Build`_ (`Cloud Build README`_, `Cloud Build Documentation`_) - `Google Cloud Container`_ (`Container README`_, `Container Documentation`_) @@ -161,6 +162,7 @@ The following client libraries have **alpha** support: - `Google Cloud IAM`_ (`IAM README`_, `IAM Documentation`_) - `Google Cloud IoT`_ (`IoT README`_, `IoT Documentation`_) - `Google Cloud Memorystore for Redis`_ (`Redis README`_, `Redis Documentation`_) +- `Google Phishing Protection`_ (`Phishing Protection README`_, `Phishing Protection Documentation`_) - `Google Cloud Recommender`_ (`Recommender README`_, `Recommender Documentation`_) - `Google Cloud Resource Manager`_ (`Resource Manager README`_, `Resource Manager Documentation`_) - `Google Cloud Runtime Configuration`_ (`Runtime Config README`_, `Runtime Config Documentation`_) @@ -178,8 +180,12 @@ The following client libraries have **alpha** support: .. _AutoML Documentation: https://googleapis.dev/python/automl/latest .. _Google BigQuery Data Transfer: https://pypi.org/project/google-cloud-bigquery-datatransfer/ -.. _BigQuery Data Transfer README: https://github.com/googleapis/google-cloud-python/tree/master/bigquery_datatransfer -.. _BigQuery Documentation: https://googleapis.dev/python/bigquery/latest +.. _BigQuery Data Transfer README: https://github.com/googleapis/python-bigquery-datatransfer +.. _BigQuery Data Transfer Documentation: https://googleapis.dev/python/bigquerydatatransfer/latest/index.html + +.. _Google BigQuery Storage: https://pypi.org/project/google-cloud-bigquery-storage/ +.. _BigQuery Storage README: https://github.com/googleapis/python-bigquery-storage/ +.. _BigQuery Storage Documentation: https://googleapis.dev/python/bigquerystorage/latest/index.html .. _Google Cloud Bigtable - HappyBase: https://pypi.org/project/google-cloud-happybase/ .. _HappyBase README: https://github.com/googleapis/google-cloud-python-happybase @@ -198,7 +204,7 @@ The following client libraries have **alpha** support: .. _Container Analysis Documentation: https://googleapis.dev/python/containeranalysis/latest .. _Google Cloud Dataproc: https://pypi.org/project/google-cloud-dataproc/ -.. _Dataproc README: https://github.com/googleapis/google-cloud-python/tree/master/dataproc +.. _Dataproc README: https://github.com/googleapis/python-dataproc .. _Dataproc Documentation: https://googleapis.dev/python/dataproc/latest .. _Google Cloud DLP: https://pypi.org/project/google-cloud-dlp/ @@ -221,6 +227,10 @@ The following client libraries have **alpha** support: .. _Redis README: https://github.com/googleapis/python-redis .. _Redis Documentation: https://googleapis.dev/python/redis/latest +.. _Google Phishing Protection: https://pypi.org/project/google-cloud-phishing-protection/ +.. _Phishing Protection README: https://github.com/googleapis/python-phishingprotection +.. _Phishing Protection Documentation: https://googleapis.dev/python/phishingprotection/latest + .. _Google Cloud Recommender: https://pypi.org/project/google-cloud-recommender/ .. 
_Recommender README: https://github.com/googleapis/google-cloud-python/tree/master/recommender .. _Recommender Documentation: https://googleapis.dev/python/recommender/latest @@ -258,7 +268,7 @@ The following client libraries have **alpha** support: .. _Error Reporting Documentation: https://googleapis.dev/python/clouderrorreporting/latest .. _Stackdriver Monitoring: https://pypi.org/project/google-cloud-monitoring/ -.. _Monitoring README: https://github.com/googleapis/google-cloud-python/tree/master/monitoring +.. _Monitoring README: https://github.com/googleapis/python-monitoring .. _Monitoring Documentation: https://googleapis.dev/python/monitoring/latest .. _Webrisk: https://pypi.org/project/google-cloud-webrisk diff --git a/bigquery_datatransfer/.coveragerc b/bigquery_datatransfer/.coveragerc deleted file mode 100644 index b178b094aa1d..000000000000 --- a/bigquery_datatransfer/.coveragerc +++ /dev/null @@ -1,19 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[run] -branch = True - -[report] -fail_under = 100 -show_missing = True -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file diff --git a/bigquery_datatransfer/.flake8 b/bigquery_datatransfer/.flake8 deleted file mode 100644 index 0268ecc9c55c..000000000000 --- a/bigquery_datatransfer/.flake8 +++ /dev/null @@ -1,14 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - *_pb2.py - - # Standard linting exemptions. - __pycache__, - .git, - *.pyc, - conf.py diff --git a/bigquery_datatransfer/.repo-metadata.json b/bigquery_datatransfer/.repo-metadata.json deleted file mode 100644 index c128b1bcfc27..000000000000 --- a/bigquery_datatransfer/.repo-metadata.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "bigquerydatatransfer", - "name_pretty": "Google BigQuery Data Transfer Service", - "product_documentation": "https://cloud.google.com/bigquery/transfer/", - "client_documentation": "https://googleapis.dev/python/bigquerydatatransfer/latest", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", - "release_level": "alpha", - "language": "python", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-bigquery-datatransfer", - "api_id": "bigquerydatatransfer.googleapis.com", - "requires_billing": true -} \ No newline at end of file diff --git a/bigquery_datatransfer/CHANGELOG.md b/bigquery_datatransfer/CHANGELOG.md deleted file mode 100644 index 483716f73f63..000000000000 --- a/bigquery_datatransfer/CHANGELOG.md +++ /dev/null @@ -1,132 +0,0 @@ -# Changelog - -[PyPI History][1] - -[1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history - -## 0.4.1 - -07-31-2019 17:50 PDT - - -### Dependencies -- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) - -### Documentation -- Fix links to BigQuery Datatransfer documentation. ([#8859](https://github.com/googleapis/google-cloud-python/pull/8859)) -- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) - -### Internal / Testing Changes -- Update intersphinx mapping for requests. 
([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) - -## 0.4.0 - -07-16-2019 17:11 PDT - -### Implementation Changes - -- Retry DEADLINE_EXCEEDED (via synth). ([#7920](https://github.com/googleapis/google-cloud-python/pull/7920)) -- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) - -### New Features - -- Add `DatasourceServiceClient` (via synth). ([#8630](https://github.com/googleapis/google-cloud-python/pull/8630)) -- Add `start_manual_transfer_runs` method (via synth). ([#8630](https://github.com/googleapis/google-cloud-python/pull/8630)) -- Add `client_info`/`version_info` support (via synth). ([#8630](https://github.com/googleapis/google-cloud-python/pull/8630)) -- Allow passing kwargs to `create_channel` (via synth). ([#8630](https://github.com/googleapis/google-cloud-python/pull/8630)) -- Add path helpers (via synth). ([#8630](https://github.com/googleapis/google-cloud-python/pull/8630)) -- Add protos as an artifact to library ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) - -### Documentation - -- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) -- Adjust indentation on scheduled query sample. ([#8493](https://github.com/googleapis/google-cloud-python/pull/8493)) -- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) -- Add sample to schedule query with BQ DTS. ([#7703](https://github.com/googleapis/google-cloud-python/pull/7703)) -- Add nox session `docs` (via synth). ([#7765](https://github.com/googleapis/google-cloud-python/pull/7765)) -- Updated client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) -- Pick up stub docstring fix in GAPIC generator. ([#6965](https://github.com/googleapis/google-cloud-python/pull/6965)) - -### Internal / Testing Changes - -- Blacken noxfile.py, setup.py (via synth). ([#8116](https://github.com/googleapis/google-cloud-python/pull/8116)) -- Add empty lines (via synth). ([#8050](https://github.com/googleapis/google-cloud-python/pull/8050)) -- Remove unused message exports (via synth). ([#7263](https://github.com/googleapis/google-cloud-python/pull/7263)) -- Protoc-generated serialization update. ([#7075](https://github.com/googleapis/google-cloud-python/pull/7075)) - -## 0.3.0 - -12-17-2018 17:59 PST - - -### Implementation Changes -- Pick up enum fixes in the GAPIC generator. ([#6608](https://github.com/googleapis/google-cloud-python/pull/6608)) -- Pick up fixes in GAPIC generator. ([#6491](https://github.com/googleapis/google-cloud-python/pull/6491)) -- Fix `client_info` bug, update docstrings. ([#6405](https://github.com/googleapis/google-cloud-python/pull/6405)) -- Re-generate library using bigquery_datatransfer/synth.py ([#5973](https://github.com/googleapis/google-cloud-python/pull/5973)) -- Fix stray, lint-breaking blank lines from autosynth. ([#5960](https://github.com/googleapis/google-cloud-python/pull/5960)) -- Re-generate library using `bigquery_datatransfer/synth.py`. ([#5947](https://github.com/googleapis/google-cloud-python/pull/5947)) - -### Dependencies -- Bump minimum api_core version for all GAPIC libs to 1.4.1. 
([#6391](https://github.com/googleapis/google-cloud-python/pull/6391)) - -### Documentation -- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) -- Fix GAX fossils ([#6264](https://github.com/googleapis/google-cloud-python/pull/6264)) -- Normalize use of support level badges ([#6159](https://github.com/googleapis/google-cloud-python/pull/6159)) -- Harmonize / DRY 'README.rst' / 'docs/index.rst'. ([#6013](https://github.com/googleapis/google-cloud-python/pull/6013)) - -### Internal / Testing Changes -- Update noxfile. -- Blacken all gen'd libs ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) -- Omit local deps ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) -- Run black at end of synth.py ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) -- Unblack bigquery gapic and protos. -- Run Black on Generated libraries ([#6666](https://github.com/googleapis/google-cloud-python/pull/6666)) -- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) -- Add synth metadata. ([#6562](https://github.com/googleapis/google-cloud-python/pull/6562)) -- Use new Nox ([#6175](https://github.com/googleapis/google-cloud-python/pull/6175)) - -## 0.2.0 - -### Implementation Changes -- Regenerate bigquery-datatransfer (#5793) - -### Internal / Testing Changes -- Avoid overwriting '__module__' of messages from shared modules. (#5364) -- Modify system tests to use prerelease versions of grpcio (#5304) -- Add Test runs for Python 3.7 and remove 3.4 (#5295) -- Fix bad trove classifier -- Rename releases to changelog and include from CHANGELOG.md (#5191) - -## 0.1.1 - -### Dependencies - -- Update dependency range for api-core to include v1.0.0 releases (#4944) - -### Documentation - -- Fix package name in readme (#4670) -- BigQueryDataTransfer: update 404 link for API documentation (#4672) -- Replacing references to `stable/` docs with `latest/`. (#4638) - -### Testing and internal changes - -- Re-enable lint for tests, remove usage of pylint (#4921) -- Normalize all setup.py files (#4909) -- Update index.rst (#4816) -- nox unittest updates (#4646) - -## 0.1.0 - -[![release level](https://img.shields.io/badge/release%20level-alpha-orange.svg?style=flat)](https://cloud.google.com/terms/launch-stages) - -The BigQuery Data Transfer Service automates data movement from SaaS -applications to Google BigQuery on a scheduled, managed basis. Your analytics -team can lay the foundation for a data warehouse without writing a single line -of code. BigQuery Data Transfer Service initially supports Google application -sources like Adwords, DoubleClick Campaign Manager, DoubleClick for Publishers -and YouTube. - -PyPI: https://pypi.org/project/google-cloud-bigquery-datatransfer/0.1.0/ diff --git a/bigquery_datatransfer/LICENSE b/bigquery_datatransfer/LICENSE deleted file mode 100644 index a8ee855de2aa..000000000000 --- a/bigquery_datatransfer/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - https://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/bigquery_datatransfer/MANIFEST.in b/bigquery_datatransfer/MANIFEST.in deleted file mode 100644 index 9cbf175afe6b..000000000000 --- a/bigquery_datatransfer/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include README.rst LICENSE -recursive-include google *.json *.proto -recursive-include tests * -global-exclude *.py[co] -global-exclude __pycache__ diff --git a/bigquery_datatransfer/README.rst b/bigquery_datatransfer/README.rst deleted file mode 100644 index bb4b5a10ef06..000000000000 --- a/bigquery_datatransfer/README.rst +++ /dev/null @@ -1,111 +0,0 @@ -Python Client for BigQuery Data Transfer API -============================================ - -|alpha| |pypi| |versions| - -The `BigQuery Data Transfer API`_ allows users to transfer data from partner -SaaS applications to Google BigQuery on a scheduled, managed basis. - -- `Client Library Documentation`_ -- `Product Documentation`_ - -.. |alpha| image:: https://img.shields.io/badge/support-alpha-orange.svg - :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#alpha-support -.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-datatransfer.svg - :target: https://pypi.org/project/google-cloud-bigquery-datatransfer/ -.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery-datatransfer.svg - :target: https://pypi.org/project/google-cloud-bigquery-datatransfer/ -.. _BigQuery Data Transfer API: https://cloud.google.com/bigquery/transfer -.. _Client Library Documentation: https://googleapis.dev/python/bigquerydatatransfer/latest -.. _Product Documentation: https://cloud.google.com/bigquery/docs/transfer-service-overview - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable the BigQuery Data Transfer API.`_ -3. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable the BigQuery Data Transfer API.: https://cloud.google.com/bigquery/docs/transfer-service-overview -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Supported Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.5 - -Deprecated Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv - source /bin/activate - /bin/pip install google-cloud-bigquery-datatransfer - - -Windows -^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-bigquery-datatransfer - -Example Usage -~~~~~~~~~~~~~ - -DataTransferServiceClient -^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. 
code:: py - - from google.cloud.bigquery import datatransfer_v1 - - client = datatransfer_v1.DataTransferServiceClient() - - parent = client.location_path('[PROJECT]', '[LOCATION]') - - - # Iterate over all results - for element in client.list_data_sources(parent): - # process element - pass - - # Or iterate over results one page at a time - for page in client.list_data_sources(parent).pages: - for element in page: - # process element - pass - -Next Steps -~~~~~~~~~~ - -- Read the `Client Library Documentation`_ for BigQuery Data Transfer API - API to see other available methods on the client. -- Read the `Product documentation`_ to learn - more about the product and see How-to Guides. diff --git a/bigquery_datatransfer/docs/README.rst b/bigquery_datatransfer/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/bigquery_datatransfer/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/bigquery_datatransfer/docs/_static/custom.css b/bigquery_datatransfer/docs/_static/custom.css deleted file mode 100644 index 0abaf229fce3..000000000000 --- a/bigquery_datatransfer/docs/_static/custom.css +++ /dev/null @@ -1,4 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} \ No newline at end of file diff --git a/bigquery_datatransfer/docs/_templates/layout.html b/bigquery_datatransfer/docs/_templates/layout.html deleted file mode 100644 index 228529efe2d2..000000000000 --- a/bigquery_datatransfer/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@ - -{% extends "!layout.html" %} -{%- block content %} -{%- if theme_fixed_sidebar|lower == 'true' %} -
- {{ sidebar() }} - {%- block document %} -
- {%- if render_sidebar %} -
- {%- endif %} - - {%- block relbar_top %} - {%- if theme_show_relbar_top|tobool %} - - {%- endif %} - {% endblock %} - -
-
- On January 1, 2020 this library will no longer support Python 2 on the latest released version. - Previously released library versions will continue to be available. For more information please - visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>. -
- {% block body %} {% endblock %} -
- - {%- block relbar_bottom %} - {%- if theme_show_relbar_bottom|tobool %} - - {%- endif %} - {% endblock %} - - {%- if render_sidebar %} -
- {%- endif %} -
- {%- endblock %} -
-
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/bigquery_datatransfer/docs/changelog.md b/bigquery_datatransfer/docs/changelog.md deleted file mode 120000 index 05ed7b33f590..000000000000 --- a/bigquery_datatransfer/docs/changelog.md +++ /dev/null @@ -1 +0,0 @@ -../../bigquery_datatransfer/CHANGELOG.md \ No newline at end of file diff --git a/bigquery_datatransfer/docs/conf.py b/bigquery_datatransfer/docs/conf.py deleted file mode 100644 index c222cb1b6c17..000000000000 --- a/bigquery_datatransfer/docs/conf.py +++ /dev/null @@ -1,356 +0,0 @@ -# -*- coding: utf-8 -*- -# -# google-cloud-bigquerydatatransfer documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-bigquerydatatransfer" -copyright = u"2017, Google" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. 
-# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. 
-# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bigquerydatatransfer-doc" - -# -- Options for warnings ------------------------------------------------------ - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - #'preamble': '', - # Latex figure (float) alignment - #'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-bigquerydatatransfer.tex", - u"google-cloud-bigquerydatatransfer Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). 
-man_pages = [ - ( - master_doc, - "google-cloud-bigquerydatatransfer", - u"google-cloud-bigquerydatatransfer Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-bigquerydatatransfer", - u"google-cloud-bigquerydatatransfer Documentation", - author, - "google-cloud-bigquerydatatransfer", - "GAPIC library for the {metadata.shortName} v1 service", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), - "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), - "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), -} - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/bigquery_datatransfer/docs/gapic/v1/api.rst b/bigquery_datatransfer/docs/gapic/v1/api.rst deleted file mode 100644 index a8b855bb4cc4..000000000000 --- a/bigquery_datatransfer/docs/gapic/v1/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for BigQuery Data Transfer API -===================================== - -.. automodule:: google.cloud.bigquery_datatransfer_v1 - :members: - :inherited-members: \ No newline at end of file diff --git a/bigquery_datatransfer/docs/gapic/v1/types.rst b/bigquery_datatransfer/docs/gapic/v1/types.rst deleted file mode 100644 index 2a77ee73ecef..000000000000 --- a/bigquery_datatransfer/docs/gapic/v1/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for BigQuery Data Transfer API Client -=========================================== - -.. automodule:: google.cloud.bigquery_datatransfer_v1.types - :members: \ No newline at end of file diff --git a/bigquery_datatransfer/docs/index.rst b/bigquery_datatransfer/docs/index.rst deleted file mode 100644 index 942e2634213c..000000000000 --- a/bigquery_datatransfer/docs/index.rst +++ /dev/null @@ -1,22 +0,0 @@ -.. include:: README.rst - - -API Reference -------------- - -.. toctree:: - :maxdepth: 2 - - gapic/v1/api - gapic/v1/types - - -Changelog ---------- - -For a list of all ``google-cloud-bigquery-bigquery-datatransfer`` releases: - -.. 
toctree:: - :maxdepth: 2 - - changelog diff --git a/bigquery_datatransfer/google/__init__.py b/bigquery_datatransfer/google/__init__.py deleted file mode 100644 index 9a1b64a6d586..000000000000 --- a/bigquery_datatransfer/google/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/bigquery_datatransfer/google/cloud/__init__.py b/bigquery_datatransfer/google/cloud/__init__.py deleted file mode 100644 index 9a1b64a6d586..000000000000 --- a/bigquery_datatransfer/google/cloud/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer.py deleted file mode 100644 index e0dccd353d54..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
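The two ``__init__.py`` stubs above are the standard namespace-package shims: ``pkg_resources.declare_namespace`` with a ``pkgutil.extend_path`` fallback is what lets the ``google`` and ``google.cloud`` import packages be shared by every separately installed ``google-cloud-*`` distribution. A minimal sketch of the effect, assuming two such distributions are installed (the second package is only illustrative and is not part of this patch):

.. code-block:: python

    # Each google-cloud-* distribution ships its own copy of google/__init__.py
    # and google/cloud/__init__.py containing the declare_namespace/extend_path
    # shim, so packages from separately installed distributions import side by side.
    from google.cloud import bigquery_datatransfer_v1   # google-cloud-bigquery-datatransfer
    from google.cloud import logging as cloud_logging   # google-cloud-logging (assumed installed)

    print(bigquery_datatransfer_v1.__name__, cloud_logging.__name__)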
- - -from __future__ import absolute_import - -from google.cloud.bigquery_datatransfer_v1 import DataTransferServiceClient -from google.cloud.bigquery_datatransfer_v1 import enums -from google.cloud.bigquery_datatransfer_v1 import types - - -__all__ = ("enums", "types", "DataTransferServiceClient") diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py deleted file mode 100644 index 82b0ad0f6fb9..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import -import sys -import warnings - -from google.cloud.bigquery_datatransfer_v1 import types -from google.cloud.bigquery_datatransfer_v1.gapic import data_transfer_service_client -from google.cloud.bigquery_datatransfer_v1.gapic import enums - - -if sys.version_info[:2] == (2, 7): - message = ( - "A future version of this library will drop support for Python 2.7." - "More details about Python 2 support for Google Cloud Client Libraries" - "can be found at https://cloud.google.com/python/docs/python2-sunset/" - ) - warnings.warn(message, DeprecationWarning) - - -class DataTransferServiceClient(data_transfer_service_client.DataTransferServiceClient): - __doc__ = data_transfer_service_client.DataTransferServiceClient.__doc__ - enums = enums - - -__all__ = ("enums", "types", "DataTransferServiceClient") diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/__init__.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py deleted file mode 100644 index 6db2c2fa444b..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py +++ /dev/null @@ -1,1561 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
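For orientation while reading the removed client below: the ``google/cloud/bigquery_datatransfer.py`` shim and the ``bigquery_datatransfer_v1/__init__.py`` shown above are the two supported import surfaces, and both re-export the same ``DataTransferServiceClient`` together with ``enums`` and ``types``. A short sketch of that relationship, assuming application default credentials are available so the constructor can build a channel:

.. code-block:: python

    # The unversioned shim and the explicit v1 package expose the same client
    # class; ``enums`` and ``types`` are re-exported alongside it.
    from google.cloud import bigquery_datatransfer
    from google.cloud import bigquery_datatransfer_v1

    client = bigquery_datatransfer.DataTransferServiceClient()
    assert client.__class__ is bigquery_datatransfer_v1.DataTransferServiceClient

    # Transfer run states come from the re-exported enums module.
    print(bigquery_datatransfer.enums.TransferState.SUCCEEDED)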
- -"""Accesses the google.cloud.bigquery.datatransfer.v1 DataTransferService API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.path_template -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import google.api_core.protobuf_helpers -import grpc - -from google.cloud.bigquery_datatransfer_v1.gapic import ( - data_transfer_service_client_config, -) -from google.cloud.bigquery_datatransfer_v1.gapic import enums -from google.cloud.bigquery_datatransfer_v1.gapic.transports import ( - data_transfer_service_grpc_transport, -) -from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2 -from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2_grpc -from google.cloud.bigquery_datatransfer_v1.proto import transfer_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-bigquery-datatransfer" -).version - - -class DataTransferServiceClient(object): - """ - The Google BigQuery Data Transfer Service API enables BigQuery users to - configure the transfer of their data from other Google Products into - BigQuery. This service contains methods that are end user exposed. It backs - up the frontend. - """ - - SERVICE_ADDRESS = "bigquerydatatransfer.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.bigquery.datatransfer.v1.DataTransferService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataTransferServiceClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def location_path(cls, project, location): - """Return a fully-qualified location string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}", - project=project, - location=location, - ) - - @classmethod - def location_data_source_path(cls, project, location, data_source): - """Return a fully-qualified location_data_source string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/dataSources/{data_source}", - project=project, - location=location, - data_source=data_source, - ) - - @classmethod - def location_run_path(cls, project, location, transfer_config, run): - """Return a fully-qualified location_run string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}", - project=project, - location=location, - transfer_config=transfer_config, - run=run, - ) - - @classmethod - def location_transfer_config_path(cls, project, location, transfer_config): - """Return a fully-qualified location_transfer_config string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/transferConfigs/{transfer_config}", - project=project, - location=location, - transfer_config=transfer_config, - ) - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project - ) - - @classmethod - def project_data_source_path(cls, project, data_source): - """Return a fully-qualified project_data_source string.""" - return google.api_core.path_template.expand( - "projects/{project}/dataSources/{data_source}", - project=project, - data_source=data_source, - ) - - @classmethod - def project_run_path(cls, project, transfer_config, run): - """Return a fully-qualified project_run string.""" - return google.api_core.path_template.expand( - "projects/{project}/transferConfigs/{transfer_config}/runs/{run}", - project=project, - transfer_config=transfer_config, - run=run, - ) - - @classmethod - def project_transfer_config_path(cls, project, transfer_config): - """Return a fully-qualified project_transfer_config string.""" - return google.api_core.path_template.expand( - "projects/{project}/transferConfigs/{transfer_config}", - project=project, - transfer_config=transfer_config, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.DataTransferServiceGrpcTransport, - Callable[[~.Credentials, type], ~.DataTransferServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. 
- credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = data_transfer_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=data_transfer_service_grpc_transport.DataTransferServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = data_transfer_service_grpc_transport.DataTransferServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def get_data_source( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Retrieves a supported data source and returns its settings, - which can be used for UI rendering. 
- - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_data_source_path('[PROJECT]', '[DATA_SOURCE]') - >>> - >>> response = client.get_data_source(name) - - Args: - name (str): Required. The field will contain name of the resource requested, for - example: ``projects/{project_id}/dataSources/{data_source_id}`` or - ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.DataSource` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_data_source" not in self._inner_api_calls: - self._inner_api_calls[ - "get_data_source" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_data_source, - default_retry=self._method_configs["GetDataSource"].retry, - default_timeout=self._method_configs["GetDataSource"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.GetDataSourceRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_data_source"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_data_sources( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists supported data sources and returns their settings, - which can be used for UI rendering. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_data_sources(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_data_sources(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The BigQuery project id for which data sources should be - returned. Must be in the form: ``projects/{project_id}`` or - \`projects/{project\_id}/locations/{location\_id} - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. 
- retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.bigquery_datatransfer_v1.types.DataSource` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_data_sources" not in self._inner_api_calls: - self._inner_api_calls[ - "list_data_sources" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_data_sources, - default_retry=self._method_configs["ListDataSources"].retry, - default_timeout=self._method_configs["ListDataSources"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.ListDataSourcesRequest( - parent=parent, page_size=page_size - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_data_sources"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="data_sources", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def create_transfer_config( - self, - parent, - transfer_config, - authorization_code=None, - version_info=None, - service_account_name=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a new data transfer configuration. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `transfer_config`: - >>> transfer_config = {} - >>> - >>> response = client.create_transfer_config(parent, transfer_config) - - Args: - parent (str): Required. The BigQuery project id where the transfer configuration - should be created. Must be in the format - projects/{project\_id}/locations/{location\_id} or - projects/{project\_id}. If specified location and location of the - destination bigquery dataset do not match - the request will fail. - transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Required. Data transfer configuration to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` - authorization_code (str): Optional OAuth2 authorization code to use with this transfer - configuration. 
This is required if new credentials are needed, as - indicated by ``CheckValidCreds``. In order to obtain - authorization\_code, please make a request to - https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client\_id=&scope=&redirect\_uri= - - - client\_id should be OAuth client\_id of BigQuery DTS API for the - given data source returned by ListDataSources method. - - data\_source\_scopes are the scopes returned by ListDataSources - method. - - redirect\_uri is an optional parameter. If not specified, then - authorization code is posted to the opener of authorization flow - window. Otherwise it will be sent to the redirect uri. A special - value of urn:ietf:wg:oauth:2.0:oob means that authorization code - should be returned in the title bar of the browser, with the page - text prompting the user to copy the code and paste it in the - application. - version_info (str): Optional version info. If users want to find a very recent access token, - that is, immediately after approving access, users have to set the - version\_info claim in the token request. To obtain the version\_info, - users must use the "none+gsession" response type, which returns a - version\_info in the authorization response that should then be put in a - JWT claim in the token request. - service_account_name (str): Optional service account name. If this field is set, the transfer config will - be created with this service account's credentials. The user making - this API call must have permission to act as this service - account. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic.
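For context, a minimal sketch of how create_transfer_config was typically invoked with a dict-form config; the project, dataset, and query values are placeholders, and the ``scheduled_query`` data source id plus the ParseDict conversion (used here because ``params`` is a Struct-typed field) are illustrative assumptions rather than part of the removed module.

from google.cloud import bigquery_datatransfer_v1
from google.protobuf import json_format

client = bigquery_datatransfer_v1.DataTransferServiceClient()
parent = client.project_path("my-project")  # placeholder project id

# Build the TransferConfig from a dict. ParseDict handles the Struct-typed
# `params` field, which a plain keyword constructor would reject.
transfer_config = json_format.ParseDict(
    {
        "destination_dataset_id": "my_dataset",  # placeholder dataset
        "display_name": "Example scheduled query",
        "data_source_id": "scheduled_query",
        "params": {"query": "SELECT CURRENT_TIMESTAMP() AS ts"},
        "schedule": "every 24 hours",
    },
    bigquery_datatransfer_v1.types.TransferConfig(),
)

response = client.create_transfer_config(parent, transfer_config)
print("Created transfer config: {}".format(response.name))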
- if "create_transfer_config" not in self._inner_api_calls: - self._inner_api_calls[ - "create_transfer_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_transfer_config, - default_retry=self._method_configs["CreateTransferConfig"].retry, - default_timeout=self._method_configs["CreateTransferConfig"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.CreateTransferConfigRequest( - parent=parent, - transfer_config=transfer_config, - authorization_code=authorization_code, - version_info=version_info, - service_account_name=service_account_name, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_transfer_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_transfer_config( - self, - transfer_config, - update_mask, - authorization_code=None, - version_info=None, - service_account_name=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a data transfer configuration. - All fields must be set, even if they are not updated. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> # TODO: Initialize `transfer_config`: - >>> transfer_config = {} - >>> - >>> # TODO: Initialize `update_mask`: - >>> update_mask = {} - >>> - >>> response = client.update_transfer_config(transfer_config, update_mask) - - Args: - transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Required. Data transfer configuration to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` - update_mask (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.FieldMask]): Required. Required list of fields to be updated in this request. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.FieldMask` - authorization_code (str): Optional OAuth2 authorization code to use with this transfer - configuration. If it is provided, the transfer configuration will be - associated with the authorizing user. In order to obtain - authorization\_code, please make a request to - https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client\_id=&scope=&redirect\_uri= - - - client\_id should be OAuth client\_id of BigQuery DTS API for the - given data source returned by ListDataSources method. - - data\_source\_scopes are the scopes returned by ListDataSources - method. - - redirect\_uri is an optional parameter. If not specified, then - authorization code is posted to the opener of authorization flow - window. Otherwise it will be sent to the redirect uri. A special - value of urn:ietf:wg:oauth:2.0:oob means that authorization code - should be returned in the title bar of the browser, with the page - text prompting the user to copy the code and paste it in the - application. - version_info (str): Optional version info. 
If users want to find a very recent access token, - that is, immediately after approving access, users have to set the - version\_info claim in the token request. To obtain the version\_info, - users must use the "none+gsession" response type. which be return a - version\_info back in the authorization response which be be put in a - JWT claim in the token request. - service_account_name (str): Optional service account name. If this field is set and - "service\_account\_name" is set in update\_mask, transfer config will be - updated to use this service account credentials. It requires that - requesting user calling this API has permissions to act as this service - account. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_transfer_config" not in self._inner_api_calls: - self._inner_api_calls[ - "update_transfer_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_transfer_config, - default_retry=self._method_configs["UpdateTransferConfig"].retry, - default_timeout=self._method_configs["UpdateTransferConfig"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.UpdateTransferConfigRequest( - transfer_config=transfer_config, - update_mask=update_mask, - authorization_code=authorization_code, - version_info=version_info, - service_account_name=service_account_name, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("transfer_config.name", transfer_config.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_transfer_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_transfer_config( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a data transfer configuration, - including any associated transfer runs and logs. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_transfer_config_path('[PROJECT]', '[TRANSFER_CONFIG]') - >>> - >>> client.delete_transfer_config(name) - - Args: - name (str): Required. The field will contain name of the resource requested, for - example: ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_transfer_config" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_transfer_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_transfer_config, - default_retry=self._method_configs["DeleteTransferConfig"].retry, - default_timeout=self._method_configs["DeleteTransferConfig"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.DeleteTransferConfigRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_transfer_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_transfer_config( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns information about a data transfer config. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_transfer_config_path('[PROJECT]', '[TRANSFER_CONFIG]') - >>> - >>> response = client.get_transfer_config(name) - - Args: - name (str): Required. The field will contain name of the resource requested, for - example: ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
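Every RPC wrapper above accepts ``retry`` and ``timeout`` overrides; the following hedged sketch swaps in a custom policy in place of the defaults parsed from the client config (resource names are placeholders).

from google.api_core import exceptions, retry
from google.cloud import bigquery_datatransfer_v1

client = bigquery_datatransfer_v1.DataTransferServiceClient()
name = client.project_transfer_config_path("my-project", "my-config")  # placeholders

# Retry only on transient errors, back off geometrically, and give up
# after two minutes overall; the 20s timeout applies to each attempt.
custom_retry = retry.Retry(
    predicate=retry.if_exception_type(
        exceptions.DeadlineExceeded, exceptions.ServiceUnavailable
    ),
    initial=0.1,
    multiplier=1.3,
    maximum=60.0,
    deadline=120.0,
)

config = client.get_transfer_config(name, retry=custom_retry, timeout=20.0)
print(config.display_name)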
- if "get_transfer_config" not in self._inner_api_calls: - self._inner_api_calls[ - "get_transfer_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_transfer_config, - default_retry=self._method_configs["GetTransferConfig"].retry, - default_timeout=self._method_configs["GetTransferConfig"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.GetTransferConfigRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_transfer_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_transfer_configs( - self, - parent, - data_source_ids=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns information about all data transfers in the project. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_transfer_configs(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_transfer_configs(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The BigQuery project id for which data sources should be - returned: ``projects/{project_id}`` or - ``projects/{project_id}/locations/{location_id}`` - data_source_ids (list[str]): When specified, only configurations of requested data sources are returned. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_transfer_configs" not in self._inner_api_calls: - self._inner_api_calls[ - "list_transfer_configs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_transfer_configs, - default_retry=self._method_configs["ListTransferConfigs"].retry, - default_timeout=self._method_configs["ListTransferConfigs"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.ListTransferConfigsRequest( - parent=parent, data_source_ids=data_source_ids, page_size=page_size - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_transfer_configs"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="transfer_configs", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def schedule_transfer_runs( - self, - parent, - start_time, - end_time, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates transfer runs for a time range [start\_time, end\_time]. For - each date - or whatever granularity the data source supports - in the - range, one transfer run is created. Note that runs are created per UTC - time in the time range. DEPRECATED: use StartManualTransferRuns instead. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> parent = client.project_transfer_config_path('[PROJECT]', '[TRANSFER_CONFIG]') - >>> - >>> # TODO: Initialize `start_time`: - >>> start_time = {} - >>> - >>> # TODO: Initialize `end_time`: - >>> end_time = {} - >>> - >>> response = client.schedule_transfer_runs(parent, start_time, end_time) - - Args: - parent (str): Required. Transfer configuration name in the form: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. - start_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): Required. Start time of the range of transfer runs. For example, - ``"2017-05-25T00:00:00+00:00"``. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.Timestamp` - end_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): Required. End time of the range of transfer runs. For example, - ``"2017-05-30T00:00:00+00:00"``. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.Timestamp` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsResponse` instance. 
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "schedule_transfer_runs" not in self._inner_api_calls: - self._inner_api_calls[ - "schedule_transfer_runs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.schedule_transfer_runs, - default_retry=self._method_configs["ScheduleTransferRuns"].retry, - default_timeout=self._method_configs["ScheduleTransferRuns"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.ScheduleTransferRunsRequest( - parent=parent, start_time=start_time, end_time=end_time - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["schedule_transfer_runs"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_transfer_run( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns information about the particular transfer run. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_run_path('[PROJECT]', '[TRANSFER_CONFIG]', '[RUN]') - >>> - >>> response = client.get_transfer_run(name) - - Args: - name (str): Required. The field will contain name of the resource requested, for - example: - ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferRun` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
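A hedged sketch of polling a run until it reaches a terminal state, using the TransferState enum defined further down in this package (resource names are placeholders and the 30-second interval is arbitrary).

import time

from google.cloud import bigquery_datatransfer_v1
from google.cloud.bigquery_datatransfer_v1 import enums

client = bigquery_datatransfer_v1.DataTransferServiceClient()
name = client.project_run_path("my-project", "my-config", "my-run")  # placeholders

terminal_states = (
    enums.TransferState.SUCCEEDED,
    enums.TransferState.FAILED,
    enums.TransferState.CANCELLED,
)

run = client.get_transfer_run(name)
while run.state not in terminal_states:  # simple polling; no push notifications here
    time.sleep(30)
    run = client.get_transfer_run(name)
print("Final state:", enums.TransferState(run.state))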
- if "get_transfer_run" not in self._inner_api_calls: - self._inner_api_calls[ - "get_transfer_run" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_transfer_run, - default_retry=self._method_configs["GetTransferRun"].retry, - default_timeout=self._method_configs["GetTransferRun"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.GetTransferRunRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_transfer_run"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_transfer_run( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes the specified transfer run. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_run_path('[PROJECT]', '[TRANSFER_CONFIG]', '[RUN]') - >>> - >>> client.delete_transfer_run(name) - - Args: - name (str): Required. The field will contain name of the resource requested, for - example: - ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_transfer_run" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_transfer_run" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_transfer_run, - default_retry=self._method_configs["DeleteTransferRun"].retry, - default_timeout=self._method_configs["DeleteTransferRun"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.DeleteTransferRunRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_transfer_run"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_transfer_runs( - self, - parent, - states=None, - page_size=None, - run_attempt=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns information about running and completed jobs. 
- - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> parent = client.project_transfer_config_path('[PROJECT]', '[TRANSFER_CONFIG]') - >>> - >>> # Iterate over all results - >>> for element in client.list_transfer_runs(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_transfer_runs(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. Name of transfer configuration for which transfer runs should - be retrieved. Format of transfer configuration resource name is: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. - states (list[~google.cloud.bigquery_datatransfer_v1.types.TransferState]): When specified, only transfer runs with requested states are returned. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - run_attempt (~google.cloud.bigquery_datatransfer_v1.types.RunAttempt): Indicates how run attempts are to be pulled. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferRun` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
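Illustrative sketch combining the ``states`` and ``run_attempt`` filters with the enum wrappers from this package; resource names are placeholders.

from google.cloud import bigquery_datatransfer_v1
from google.cloud.bigquery_datatransfer_v1 import enums

client = bigquery_datatransfer_v1.DataTransferServiceClient()
parent = client.project_transfer_config_path("my-project", "my-config")  # placeholders

# Only failed runs, and only the latest attempt for each scheduled run.
runs = client.list_transfer_runs(
    parent,
    states=[enums.TransferState.FAILED],
    run_attempt=enums.ListTransferRunsRequest.RunAttempt.LATEST,
)
for run in runs:
    print(run.name, run.state)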
- if "list_transfer_runs" not in self._inner_api_calls: - self._inner_api_calls[ - "list_transfer_runs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_transfer_runs, - default_retry=self._method_configs["ListTransferRuns"].retry, - default_timeout=self._method_configs["ListTransferRuns"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.ListTransferRunsRequest( - parent=parent, states=states, page_size=page_size, run_attempt=run_attempt - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_transfer_runs"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="transfer_runs", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def list_transfer_logs( - self, - parent, - page_size=None, - message_types=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns user facing log messages for the data transfer run. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> parent = client.project_run_path('[PROJECT]', '[TRANSFER_CONFIG]', '[RUN]') - >>> - >>> # Iterate over all results - >>> for element in client.list_transfer_logs(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_transfer_logs(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. Transfer run name in the form: - ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - message_types (list[~google.cloud.bigquery_datatransfer_v1.types.MessageSeverity]): Message types to return. If not populated - INFO, WARNING and ERROR - messages are returned. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferMessage` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_transfer_logs" not in self._inner_api_calls: - self._inner_api_calls[ - "list_transfer_logs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_transfer_logs, - default_retry=self._method_configs["ListTransferLogs"].retry, - default_timeout=self._method_configs["ListTransferLogs"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.ListTransferLogsRequest( - parent=parent, page_size=page_size, message_types=message_types - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_transfer_logs"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="transfer_messages", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def check_valid_creds( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns true if valid credentials exist for the given data source and - requesting user. - Some data sources doesn't support service account, so we need to talk to - them on behalf of the end user. This API just checks whether we have OAuth - token for the particular user, which is a pre-requisite before user can - create a transfer config. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_data_source_path('[PROJECT]', '[DATA_SOURCE]') - >>> - >>> response = client.check_valid_creds(name) - - Args: - name (str): Required. The data source in the form: - ``projects/{project_id}/dataSources/{data_source_id}`` or - ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
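A hedged sketch of using this check as a gate before creating a transfer config; the ``has_valid_creds`` field on CheckValidCredsResponse is assumed from the message definition, and the names are placeholders.

from google.cloud import bigquery_datatransfer_v1

client = bigquery_datatransfer_v1.DataTransferServiceClient()
name = client.project_data_source_path("my-project", "scheduled_query")  # placeholders

response = client.check_valid_creds(name)
if not response.has_valid_creds:
    # No OAuth token on file for this user and data source; an
    # authorization_code would have to be obtained first.
    print("Credentials missing for data source:", name)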
- if "check_valid_creds" not in self._inner_api_calls: - self._inner_api_calls[ - "check_valid_creds" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.check_valid_creds, - default_retry=self._method_configs["CheckValidCreds"].retry, - default_timeout=self._method_configs["CheckValidCreds"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.CheckValidCredsRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["check_valid_creds"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def start_manual_transfer_runs( - self, - parent=None, - requested_time_range=None, - requested_run_time=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Start manual transfer runs to be executed now with schedule\_time equal - to current time. The transfer runs can be created for a time range where - the run\_time is between start\_time (inclusive) and end\_time - (exclusive), or for a specific run\_time. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> response = client.start_manual_transfer_runs() - - Args: - parent (str): Transfer configuration name in the form: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. - requested_time_range (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TimeRange]): Time range for the transfer runs that should be started. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.TimeRange` - requested_run_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): Specific run\_time for a transfer run to be started. The - requested\_run\_time must not be in the future. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.Timestamp` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "start_manual_transfer_runs" not in self._inner_api_calls: - self._inner_api_calls[ - "start_manual_transfer_runs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.start_manual_transfer_runs, - default_retry=self._method_configs["StartManualTransferRuns"].retry, - default_timeout=self._method_configs["StartManualTransferRuns"].timeout, - client_info=self._client_info, - ) - - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof( - requested_time_range=requested_time_range, - requested_run_time=requested_run_time, - ) - - request = datatransfer_pb2.StartManualTransferRunsRequest( - parent=parent, - requested_time_range=requested_time_range, - requested_run_time=requested_run_time, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["start_manual_transfer_runs"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py deleted file mode 100644 index 28a9494f22ef..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py +++ /dev/null @@ -1,93 +0,0 @@ -config = { - "interfaces": { - "google.cloud.bigquery.datatransfer.v1.DataTransferService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "GetDataSource": { - "timeout_millis": 20000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListDataSources": { - "timeout_millis": 20000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateTransferConfig": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateTransferConfig": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DeleteTransferConfig": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetTransferConfig": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListTransferConfigs": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ScheduleTransferRuns": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetTransferRun": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteTransferRun": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListTransferRuns": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - 
"retry_params_name": "default", - }, - "ListTransferLogs": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CheckValidCreds": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "StartManualTransferRuns": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/enums.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/enums.py deleted file mode 100644 index c3e7c6e71cd8..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/enums.py +++ /dev/null @@ -1,164 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class NullValue(enum.IntEnum): - """ - ``NullValue`` is a singleton enumeration to represent the null value for - the ``Value`` type union. - - The JSON representation for ``NullValue`` is JSON ``null``. - - Attributes: - NULL_VALUE (int): Null value. - """ - - NULL_VALUE = 0 - - -class TransferState(enum.IntEnum): - """ - Represents data transfer run state. - - Attributes: - TRANSFER_STATE_UNSPECIFIED (int): State placeholder. - PENDING (int): Data transfer is scheduled and is waiting to be picked up by - data transfer backend. - RUNNING (int): Data transfer is in progress. - SUCCEEDED (int): Data transfer completed successfully. - FAILED (int): Data transfer failed. - CANCELLED (int): Data transfer is cancelled. - """ - - TRANSFER_STATE_UNSPECIFIED = 0 - PENDING = 2 - RUNNING = 3 - SUCCEEDED = 4 - FAILED = 5 - CANCELLED = 6 - - -class TransferType(enum.IntEnum): - """ - DEPRECATED. Represents data transfer type. - - Attributes: - TRANSFER_TYPE_UNSPECIFIED (int): Invalid or Unknown transfer type placeholder. - BATCH (int): Batch data transfer. - STREAMING (int): Streaming data transfer. Streaming data source currently doesn't - support multiple transfer configs per project. - """ - - TRANSFER_TYPE_UNSPECIFIED = 0 - BATCH = 1 - STREAMING = 2 - - -class DataSource(object): - class AuthorizationType(enum.IntEnum): - """ - The type of authorization needed for this data source. - - Attributes: - AUTHORIZATION_TYPE_UNSPECIFIED (int): Type unspecified. - AUTHORIZATION_CODE (int): Use OAuth 2 authorization codes that can be exchanged - for a refresh token on the backend. - GOOGLE_PLUS_AUTHORIZATION_CODE (int): Return an authorization code for a given Google+ page that can then be - exchanged for a refresh token on the backend. - """ - - AUTHORIZATION_TYPE_UNSPECIFIED = 0 - AUTHORIZATION_CODE = 1 - GOOGLE_PLUS_AUTHORIZATION_CODE = 2 - - class DataRefreshType(enum.IntEnum): - """ - Represents how the data source supports data auto refresh. - - Attributes: - DATA_REFRESH_TYPE_UNSPECIFIED (int): The data source won't support data auto refresh, which is default value. 
- SLIDING_WINDOW (int): The data source supports data auto refresh, and runs will be scheduled - for the past few days. Does not allow custom values to be set for each - transfer config. - CUSTOM_SLIDING_WINDOW (int): The data source supports data auto refresh, and runs will be scheduled - for the past few days. Allows custom values to be set for each transfer - config. - """ - - DATA_REFRESH_TYPE_UNSPECIFIED = 0 - SLIDING_WINDOW = 1 - CUSTOM_SLIDING_WINDOW = 2 - - -class DataSourceParameter(object): - class Type(enum.IntEnum): - """ - Parameter type. - - Attributes: - TYPE_UNSPECIFIED (int): Type unspecified. - STRING (int): String parameter. - INTEGER (int): Integer parameter (64-bits). - Will be serialized to json as string. - DOUBLE (int): Double precision floating point parameter. - BOOLEAN (int): Boolean parameter. - RECORD (int): Deprecated. This field has no effect. - PLUS_PAGE (int): Page ID for a Google+ Page. - """ - - TYPE_UNSPECIFIED = 0 - STRING = 1 - INTEGER = 2 - DOUBLE = 3 - BOOLEAN = 4 - RECORD = 5 - PLUS_PAGE = 6 - - -class ListTransferRunsRequest(object): - class RunAttempt(enum.IntEnum): - """ - Represents which runs should be pulled. - - Attributes: - RUN_ATTEMPT_UNSPECIFIED (int): All runs should be returned. - LATEST (int): Only latest run per day should be returned. - """ - - RUN_ATTEMPT_UNSPECIFIED = 0 - LATEST = 1 - - -class TransferMessage(object): - class MessageSeverity(enum.IntEnum): - """ - Represents data transfer user facing message severity. - - Attributes: - MESSAGE_SEVERITY_UNSPECIFIED (int): No severity specified. - INFO (int): Informational message. - WARNING (int): Warning message. - ERROR (int): Error message. - """ - - MESSAGE_SEVERITY_UNSPECIFIED = 0 - INFO = 1 - WARNING = 2 - ERROR = 3 diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/__init__.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py deleted file mode 100644 index 840fcbeb10d5..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py +++ /dev/null @@ -1,313 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2_grpc - - -class DataTransferServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.bigquery.datatransfer.v1 DataTransferService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. 
- """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, - channel=None, - credentials=None, - address="bigquerydatatransfer.googleapis.com:443", - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "data_transfer_service_stub": datatransfer_pb2_grpc.DataTransferServiceStub( - channel - ) - } - - @classmethod - def create_channel( - cls, - address="bigquerydatatransfer.googleapis.com:443", - credentials=None, - **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def get_data_source(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.get_data_source`. - - Retrieves a supported data source and returns its settings, - which can be used for UI rendering. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].GetDataSource - - @property - def list_data_sources(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.list_data_sources`. - - Lists supported data sources and returns their settings, - which can be used for UI rendering. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["data_transfer_service_stub"].ListDataSources - - @property - def create_transfer_config(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.create_transfer_config`. - - Creates a new data transfer configuration. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].CreateTransferConfig - - @property - def update_transfer_config(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.update_transfer_config`. - - Updates a data transfer configuration. - All fields must be set, even if they are not updated. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].UpdateTransferConfig - - @property - def delete_transfer_config(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.delete_transfer_config`. - - Deletes a data transfer configuration, - including any associated transfer runs and logs. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].DeleteTransferConfig - - @property - def get_transfer_config(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.get_transfer_config`. - - Returns information about a data transfer config. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].GetTransferConfig - - @property - def list_transfer_configs(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.list_transfer_configs`. - - Returns information about all data transfers in the project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].ListTransferConfigs - - @property - def schedule_transfer_runs(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.schedule_transfer_runs`. - - Creates transfer runs for a time range [start\_time, end\_time]. For - each date - or whatever granularity the data source supports - in the - range, one transfer run is created. Note that runs are created per UTC - time in the time range. DEPRECATED: use StartManualTransferRuns instead. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].ScheduleTransferRuns - - @property - def get_transfer_run(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.get_transfer_run`. - - Returns information about the particular transfer run. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].GetTransferRun - - @property - def delete_transfer_run(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.delete_transfer_run`. - - Deletes the specified transfer run. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].DeleteTransferRun - - @property - def list_transfer_runs(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.list_transfer_runs`. - - Returns information about running and completed jobs. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].ListTransferRuns - - @property - def list_transfer_logs(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.list_transfer_logs`. - - Returns user facing log messages for the data transfer run. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].ListTransferLogs - - @property - def check_valid_creds(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.check_valid_creds`. - - Returns true if valid credentials exist for the given data source and - requesting user. - Some data sources doesn't support service account, so we need to talk to - them on behalf of the end user. This API just checks whether we have OAuth - token for the particular user, which is a pre-requisite before user can - create a transfer config. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].CheckValidCreds - - @property - def start_manual_transfer_runs(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.start_manual_transfer_runs`. - - Start manual transfer runs to be executed now with schedule\_time equal - to current time. The transfer runs can be created for a time range where - the run\_time is between start\_time (inclusive) and end\_time - (exclusive), or for a specific run\_time. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].StartManualTransferRuns diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/__init__.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource.proto b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource.proto deleted file mode 100644 index d7400a55935d..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource.proto +++ /dev/null @@ -1,542 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
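A minimal usage sketch for the gRPC transport removed above, assuming the installed package layout google.cloud.bigquery_datatransfer_v1.gapic.transports and ambient application-default credentials; the constructor, the create_channel helper, and the per-RPC stub properties are the ones documented in the removed module:

    from google.cloud.bigquery_datatransfer_v1.gapic.transports import (
        data_transfer_service_grpc_transport as transports,
    )

    # Build a channel explicitly, then hand it to the transport. Passing both a
    # channel and credentials raises ValueError, because a channel already
    # carries its own credentials.
    channel = transports.DataTransferServiceGrpcTransport.create_channel(
        address="bigquerydatatransfer.googleapis.com:443"
    )
    transport = transports.DataTransferServiceGrpcTransport(channel=channel)

    # Each property exposes the bound gRPC callable for one RPC.
    list_data_sources = transport.list_data_sources
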
-// - -syntax = "proto3"; - -package google.cloud.bigquery.datatransfer.v1; - -import "google/api/annotations.proto"; -import "google/cloud/bigquery/datatransfer/v1/datatransfer.proto"; -import "google/cloud/bigquery/datatransfer/v1/transfer.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/wrappers.proto"; -import "google/api/client.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer"; -option java_multiple_files = true; -option java_outer_classname = "DataSourceProto"; -option java_package = "com.google.cloud.bigquery.datatransfer.v1"; -option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1"; - -// The Google BigQuery Data Transfer API allows BigQuery users to -// configure transfer of their data from other Google Products into BigQuery. -// This service exposes methods that should be used by data source backend. -service DataSourceService { - option (google.api.default_host) = "bigquerydatatransfer.googleapis.com"; - - // Update a transfer run. If successful, resets - // data_source.update_deadline_seconds timer. - rpc UpdateTransferRun(UpdateTransferRunRequest) returns (TransferRun) { - option (google.api.http) = { - patch: "/v1/{transfer_run.name=projects/*/locations/*/transferConfigs/*/runs/*}" - body: "transfer_run" - }; - } - - // Log messages for a transfer run. If successful (at least 1 message), resets - // data_source.update_deadline_seconds timer. - rpc LogTransferRunMessages(LogTransferRunMessagesRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:logMessages" - body: "*" - }; - } - - // Notify the Data Transfer Service that data is ready for loading. - // The Data Transfer Service will start and monitor multiple BigQuery Load - // jobs for a transfer run. Monitored jobs will be automatically retried - // and produce log messages when starting and finishing a job. - // Can be called multiple times for the same transfer run. - rpc StartBigQueryJobs(StartBigQueryJobsRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:startBigQueryJobs" - body: "*" - }; - } - - // Notify the Data Transfer Service that the data source is done processing - // the run. No more status updates or requests to start/monitor jobs will be - // accepted. The run will be finalized by the Data Transfer Service when all - // monitored jobs are completed. - // Does not need to be called if the run is set to FAILED. - rpc FinishRun(FinishRunRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:finishRun" - body: "*" - }; - } - - // Creates a data source definition. Calling this method will automatically - // use your credentials to create the following Google Cloud resources in - // YOUR Google Cloud project. - // 1. OAuth client - // 2. Pub/Sub Topics and Subscriptions in each supported_location_ids. e.g., - // projects/{project_id}/{topics|subscriptions}/bigquerydatatransfer.{data_source_id}.{location_id}.run - // The field data_source.client_id should be left empty in the input request, - // as the API will create a new OAuth client on behalf of the caller. 
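For orientation, each google.api.http annotation above spells out the REST binding of its RPC on the service's default host; the LogTransferRunMessages binding, for example, corresponds roughly to

    POST https://bigquerydatatransfer.googleapis.com/v1/projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}:logMessages

with the remaining request fields (everything except the path-bound name) carried as the JSON request body.
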
On the - // other hand data_source.scopes usually need to be set when there are OAuth - // scopes that need to be granted by end users. - // 3. We need a longer deadline due to the 60 seconds SLO from Pub/Sub admin - // Operations. This also applies to update and delete data source definition. - rpc CreateDataSourceDefinition(CreateDataSourceDefinitionRequest) returns (DataSourceDefinition) { - option (google.api.http) = { - post: "/v1/{parent=projects/*/locations/*}/dataSourceDefinitions" - body: "data_source_definition" - }; - } - - // Updates an existing data source definition. If changing - // supported_location_ids, triggers same effects as mentioned in "Create a - // data source definition." - rpc UpdateDataSourceDefinition(UpdateDataSourceDefinitionRequest) returns (DataSourceDefinition) { - option (google.api.http) = { - patch: "/v1/{data_source_definition.name=projects/*/locations/*/dataSourceDefinitions/*}" - body: "data_source_definition" - }; - } - - // Deletes a data source definition, all of the transfer configs associated - // with this data source definition (if any) must be deleted first by the user - // in ALL regions, in order to delete the data source definition. - // This method is primarily meant for deleting data sources created during - // testing stage. - // If the data source is referenced by transfer configs in the region - // specified in the request URL, the method will fail immediately. If in the - // current region (e.g., US) it's not used by any transfer configs, but in - // another region (e.g., EU) it is, then although the method will succeed in - // region US, but it will fail when the deletion operation is replicated to - // region EU. And eventually, the system will replicate the data source - // definition back from EU to US, in order to bring all regions to - // consistency. The final effect is that the data source appears to be - // 'undeleted' in the US region. - rpc DeleteDataSourceDefinition(DeleteDataSourceDefinitionRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/{name=projects/*/locations/*/dataSourceDefinitions/*}" - }; - } - - // Retrieves an existing data source definition. - rpc GetDataSourceDefinition(GetDataSourceDefinitionRequest) returns (DataSourceDefinition) { - option (google.api.http) = { - get: "/v1/{name=projects/*/locations/*/dataSourceDefinitions/*}" - }; - } - - // Lists supported data source definitions. - rpc ListDataSourceDefinitions(ListDataSourceDefinitionsRequest) returns (ListDataSourceDefinitionsResponse) { - option (google.api.http) = { - get: "/v1/{parent=projects/*/locations/*}/dataSourceDefinitions" - }; - } -} - -// Describes data which should be imported. -message ImportedDataInfo { - // Defines schema of a field in the imported data. - message FieldSchema { - // LINT.IfChange - // Field type. - enum Type { - // Illegal value. - TYPE_UNSPECIFIED = 0; - - // 64K, UTF8. - STRING = 1; - - // 64-bit signed. - INTEGER = 2; - - // 64-bit IEEE floating point. - FLOAT = 3; - - // Aggregate type. - RECORD = 4; - - // 64K, Binary. - BYTES = 5; - - // 2-valued. - BOOLEAN = 6; - - // 64-bit signed usec since UTC epoch. - TIMESTAMP = 7; - - // Civil date - Year, Month, Day. - DATE = 8; - - // Civil time - Hour, Minute, Second, Microseconds. - TIME = 9; - - // Combination of civil date and civil time. - DATETIME = 10; - - // Numeric type with 38 decimal digits of precision and 9 decimal digits - // of scale. - NUMERIC = 11; - - // Geography object (go/googlesql_geography). 
- GEOGRAPHY = 12; - } - - // Field name. Matches: [A-Za-z_][A-Za-z_0-9]{0,127} - string field_name = 1; - - // Field type - Type type = 2; - - // Is field repeated. - bool is_repeated = 3; - - // Description for this field. - string description = 4; - - // Present iff type == RECORD. - RecordSchema schema = 5; - } - - // Describes schema of the data to be ingested. - message RecordSchema { - // One field per column in the record. - repeated FieldSchema fields = 1; - } - - // External table definition. These tables can be referenced with 'name' - // in the query and can be read just like any other table. - message TableDefinition { - // CSV specific options. - message CsvOptions { - // The delimiter. We currently restrict this to U+0001 to U+00FF and - // apply additional constraints during validation. - google.protobuf.StringValue field_delimiter = 1; - - // Whether CSV files are allowed to have quoted newlines. If quoted - // newlines are allowed, we can't split CSV files. - google.protobuf.BoolValue allow_quoted_newlines = 2; - - // The quote character. We currently restrict this to U+0000 to U+00FF - // and apply additional constraints during validation. Set to '\0' to - // indicate no quote is used. - google.protobuf.StringValue quote_char = 3; - - // Number of leading rows to skip. - google.protobuf.Int64Value skip_leading_rows = 4; - - // Accept rows that are missing trailing optional columns. - google.protobuf.BoolValue allow_jagged_rows = 5; - } - - // BigQuery table_id (required). This will be used to reference this - // table in the query. - string table_id = 1; - - // URIs for the data to be imported. All URIs must be from the same storage - // system. - repeated string source_uris = 2; - - // Describes the format of the data in source_uri. - Format format = 3; - - // Specify the maximum number of bad records that can be ignored. - // If bad records exceed this threshold the query is aborted. - int32 max_bad_records = 4; - - // Character encoding of the input when applicable (CSV, JSON). - // Defaults to UTF8. - Encoding encoding = 5; - - // CSV specific options. - CsvOptions csv_options = 6; - - // Optional schema for the data. When not specified for JSON and CSV formats - // we will try to detect it automatically. - RecordSchema schema = 7; - - // Indicates if extra values that are not represented in the table schema is - // allowed. - google.protobuf.BoolValue ignore_unknown_values = 10; - } - - // Data format. - enum Format { - // Unspecified format. In this case, we have to infer the format from the - // data source. - FORMAT_UNSPECIFIED = 0; - - // CSV format. - CSV = 1; - - // Newline-delimited JSON. - JSON = 2; - - // Avro format. See http://avro.apache.org . - AVRO = 3; - - // RecordIO. - RECORDIO = 4; - - // ColumnIO. - COLUMNIO = 5; - - // Capacitor. - CAPACITOR = 6; - - // Parquet format. See https://parquet.apache.org . - PARQUET = 7; - - // ORC format. See https://orc.apache.org . - ORC = 8; - } - - // Encoding of input data in CSV/JSON format. - enum Encoding { - // Default encoding (UTF8). - ENCODING_UNSPECIFIED = 0; - - // ISO_8859_1 encoding. - ISO_8859_1 = 1; - - // UTF8 encoding. - UTF8 = 2; - } - - // SQL query to run. When empty, API checks that there is only one - // table_def specified and loads this table. Only Standard SQL queries - // are accepted. Legacy SQL is not allowed. - string sql = 1; - - // Table where results should be written. - string destination_table_id = 2; - - // The description of a destination table. 
This can be several sentences - // or paragraphs describing the table contents in detail. - string destination_table_description = 10; - - // When used WITHOUT the "sql" parameter, describes the schema of the - // destination table. - // When used WITH the "sql" parameter, describes tables with data stored - // outside of BigQuery. - repeated TableDefinition table_defs = 3; - - // Inline code for User-defined function resources. - // Ignored when "sql" parameter is empty. - repeated string user_defined_functions = 4; - - // Specifies the action if the destination table already exists. - WriteDisposition write_disposition = 6; -} - -// A request to update a transfer run. -message UpdateTransferRunRequest { - // Run name must be set and correspond to an already existing run. Only - // state, error_status, and data_version fields will be updated. All other - // fields will be ignored. - TransferRun transfer_run = 1; - - // Required list of fields to be updated in this request. - google.protobuf.FieldMask update_mask = 2; -} - -// A request to add transfer status messages to the run. -message LogTransferRunMessagesRequest { - // Name of the resource in the form: - // "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}" - string name = 1; - - // Messages to append. - repeated TransferMessage transfer_messages = 2; -} - -// A request to start and monitor a BigQuery load job. -message StartBigQueryJobsRequest { - // Name of the resource in the form: - // "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}" - string name = 1; - - // Import jobs which should be started and monitored. - repeated ImportedDataInfo imported_data = 2; - - // User credentials which should be used to start/monitor - // BigQuery jobs. If not specified, then jobs - // are started using data source service account credentials. - // This may be OAuth token or JWT token. - bytes user_credentials = 3; - - // The number of BQ Jobs that can run in parallel. - int32 max_parallelism = 8; -} - -// A request to finish a run. -message FinishRunRequest { - // Name of the resource in the form: - // "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}" - string name = 1; -} - -// Represents the request of the CreateDataSourceDefinition method. -message CreateDataSourceDefinitionRequest { - // The BigQuery project id for which data source definition is associated. - // Must be in the form: `projects/{project_id}/locations/{location_id}` - string parent = 1; - - // Data source definition. - DataSourceDefinition data_source_definition = 2; -} - -// Represents the request of the UpdateDataSourceDefinition method. -message UpdateDataSourceDefinitionRequest { - // Data source definition. - DataSourceDefinition data_source_definition = 1; - - // Update field mask. - google.protobuf.FieldMask update_mask = 2; -} - -// Represents the request of the DeleteDataSourceDefinition method. All transfer -// configs associated with the data source must be deleted first, before the -// data source can be deleted. -message DeleteDataSourceDefinitionRequest { - // The field will contain name of the resource requested, for example: - // `projects/{project_id}/locations/{location_id}/dataSourceDefinitions/{data_source_id}` - string name = 1; -} - -// Represents the request of the GetDataSourceDefinition method. -message GetDataSourceDefinitionRequest { - // The field will contain name of the resource requested. 
- string name = 1; -} - -// Options for writing to the table. -// The WRITE_EMPTY option is intentionally excluded from the enum and is not -// supported by the data transfer service. -enum WriteDisposition { - // The default writeDispostion - WRITE_DISPOSITION_UNSPECIFIED = 0; - - // overwrites the table data. - WRITE_TRUNCATE = 1; - - // the data is appended to the table. - // Note duplication might happen if this mode is used. - WRITE_APPEND = 2; -} - -// Represents the request of the ListDataSourceDefinitions method. -message ListDataSourceDefinitionsRequest { - // The BigQuery project id for which data sources should be returned. - // Must be in the form: `projects/{project_id}/locations/{location_id}` - string parent = 1; - - // Pagination token, which can be used to request a specific page - // of `ListDataSourceDefinitionsRequest` list results. For multiple-page - // results, `ListDataSourceDefinitionsResponse` outputs a `next_page` token, - // which can be used as the `page_token` value to request the next page of - // the list results. - string page_token = 2; - - // Page size. The default page size is the maximum value of 1000 results. - int32 page_size = 3; -} - -// Returns a list of supported data source definitions. -message ListDataSourceDefinitionsResponse { - // List of supported data source definitions. - repeated DataSourceDefinition data_source_definitions = 1; - - // Output only. The next-pagination token. For multiple-page list results, - // this token can be used as the - // `ListDataSourceDefinitionsRequest.page_token` - // to request the next page of the list results. - string next_page_token = 2; -} - -// Represents the data source definition. -message DataSourceDefinition { - // The resource name of the data source definition. - // Data source definition names have the form - // `projects/{project_id}/locations/{location}/dataSourceDefinitions/{data_source_id}`. - string name = 21; - - // Data source metadata. - DataSource data_source = 1; - - // The Pub/Sub topic to be used for broadcasting a message when a transfer run - // is created. Both this topic and transfer_config_pubsub_topic can be - // set to a custom topic. By default, both topics are auto-generated if none - // of them is provided when creating the definition. However, if one topic is - // manually set, the other topic has to be manually set as well. The only - // difference is that transfer_run_pubsub_topic must be a non-empty Pub/Sub - // topic, but transfer_config_pubsub_topic can be set to empty. The comments - // about "{location}" for transfer_config_pubsub_topic apply here too. - string transfer_run_pubsub_topic = 13; - - // Duration which should be added to schedule_time to calculate - // run_time when job is scheduled. Only applicable for automatically - // scheduled transfer runs. Used to start a run early on a data source that - // supports continuous data refresh to compensate for unknown timezone - // offsets. Use a negative number to start a run late for data sources not - // supporting continuous data refresh. - google.protobuf.Duration run_time_offset = 16; - - // Support e-mail address of the OAuth client's Brand, which contains the - // consent screen data. - string support_email = 22; - - // When service account is specified, BigQuery will share created dataset - // with the given service account. Also, this service account will be - // eligible to perform status updates and message logging for data transfer - // runs for the corresponding data_source_id. 
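The page_token / next_page_token pair above follows the usual list-pagination pattern; a minimal loop over a DataSourceService stub (for example datasource_pb2_grpc.DataSourceServiceStub bound to a channel) might look like the sketch below, where the import path, the stub, and the parent value are illustrative assumptions:

    from google.cloud.bigquery_datatransfer_v1.proto import datasource_pb2

    def list_all_definitions(stub, parent):
        """Yield every DataSourceDefinition under ``parent``, following page tokens."""
        request = datasource_pb2.ListDataSourceDefinitionsRequest(
            parent=parent, page_size=100
        )
        while True:
            response = stub.ListDataSourceDefinitions(request)
            for definition in response.data_source_definitions:
                yield definition
            if not response.next_page_token:
                return
            request.page_token = response.next_page_token
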
- string service_account = 2; - - // Is data source disabled? If true, data_source is not visible. - // API will also stop returning any data transfer configs and/or runs - // associated with the data source. This setting has higher priority - // than whitelisted_project_ids. - bool disabled = 5; - - // The Pub/Sub topic to use for broadcasting a message for transfer config. If - // empty, a message will not be broadcasted. Both this topic and - // transfer_run_pubsub_topic are auto-generated if none of them is provided - // when creating the definition. It is recommended to provide - // transfer_config_pubsub_topic if a user-owned transfer_run_pubsub_topic is - // provided. Otherwise, it will be set to empty. If "{location}" is found in - // the value, then that means, data source wants to handle message separately - // for datasets in different regions. We will replace {location} with the - // actual dataset location, as the actual topic name. For example, - // projects/connector/topics/scheduler-{location} could become - // projects/connector/topics/scheduler-us. If "{location}" is not found, then - // we will use the input value as topic name. - string transfer_config_pubsub_topic = 12; - - // Supported location_ids used for deciding in which locations Pub/Sub topics - // need to be created. If custom Pub/Sub topics are used and they contains - // '{location}', the location_ids will be used for validating the topics by - // replacing the '{location}' with the individual location in the list. The - // valid values are the "location_id" field of the response of `GET - // https://bigquerydatatransfer.googleapis.com/v1/{name=projects/*}/locations` - // In addition, if the data source needs to support all available regions, - // supported_location_ids can be set to "global" (a single string element). - // When "global" is specified: - // 1) the data source implementation is supposed to stage the data in proper - // region of the destination dataset; - // 2) Data source developer should be aware of the implications (e.g., network - // traffic latency, potential charge associated with cross-region traffic, - // etc.) of supporting the "global" region; - repeated string supported_location_ids = 23; -} diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2.py deleted file mode 100644 index 82c7e654e364..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2.py +++ /dev/null @@ -1,2221 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
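A minimal construction sketch for the ImportedDataInfo message defined in the proto above, roughly as a data source backend would populate StartBigQueryJobsRequest.imported_data; the import path and all literal values are illustrative assumptions:

    from google.cloud.bigquery_datatransfer_v1.proto import datasource_pb2

    table_def = datasource_pb2.ImportedDataInfo.TableDefinition(
        table_id="staging_table",
        source_uris=["gs://example-bucket/export-*.csv"],
        format=datasource_pb2.ImportedDataInfo.CSV,
    )
    # CSV options use protobuf wrapper types, so scalars are set through .value.
    table_def.csv_options.skip_leading_rows.value = 1

    info = datasource_pb2.ImportedDataInfo(
        destination_table_id="destination_table",
        table_defs=[table_def],
        write_disposition=datasource_pb2.WRITE_TRUNCATE,
    )
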
-# source: google/cloud/bigquery/datatransfer_v1/proto/datasource.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.bigquery.datatransfer_v1.proto import ( - datatransfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2, -) -from google.cloud.bigquery.datatransfer_v1.proto import ( - transfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery/datatransfer_v1/proto/datasource.proto", - package="google.cloud.bigquery.datatransfer.v1", - syntax="proto3", - serialized_options=_b( - "\n)com.google.cloud.bigquery.datatransfer.v1B\017DataSourceProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1" - ), - serialized_pb=_b( - '\ngoogle/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto\x1a:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x17google/api/client.proto"\x9e\x0e\n\x10ImportedDataInfo\x12\x0b\n\x03sql\x18\x01 \x01(\t\x12\x1c\n\x14\x64\x65stination_table_id\x18\x02 \x01(\t\x12%\n\x1d\x64\x65stination_table_description\x18\n \x01(\t\x12[\n\ntable_defs\x18\x03 \x03(\x0b\x32G.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition\x12\x1e\n\x16user_defined_functions\x18\x04 \x03(\t\x12R\n\x11write_disposition\x18\x06 \x01(\x0e\x32\x37.google.cloud.bigquery.datatransfer.v1.WriteDisposition\x1a\xad\x03\n\x0b\x46ieldSchema\x12\x12\n\nfield_name\x18\x01 \x01(\t\x12V\n\x04type\x18\x02 \x01(\x0e\x32H.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.Type\x12\x13\n\x0bis_repeated\x18\x03 \x01(\x08\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12T\n\x06schema\x18\x05 \x01(\x0b\x32\x44.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema"\xb1\x01\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\x0b\n\x07INTEGER\x10\x02\x12\t\n\x05\x46LOAT\x10\x03\x12\n\n\x06RECORD\x10\x04\x12\t\n\x05\x42YTES\x10\x05\x12\x0b\n\x07\x42OOLEAN\x10\x06\x12\r\n\tTIMESTAMP\x10\x07\x12\x08\n\x04\x44\x41TE\x10\x08\x12\x08\n\x04TIME\x10\t\x12\x0c\n\x08\x44\x41TETIME\x10\n\x12\x0b\n\x07NUMERIC\x10\x0b\x12\r\n\tGEOGRAPHY\x10\x0c\x1a\x63\n\x0cRecordSchema\x12S\n\x06\x66ields\x18\x01 
\x03(\x0b\x32\x43.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema\x1a\x91\x06\n\x0fTableDefinition\x12\x10\n\x08table_id\x18\x01 \x01(\t\x12\x13\n\x0bsource_uris\x18\x02 \x03(\t\x12N\n\x06\x66ormat\x18\x03 \x01(\x0e\x32>.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.Format\x12\x17\n\x0fmax_bad_records\x18\x04 \x01(\x05\x12R\n\x08\x65ncoding\x18\x05 \x01(\x0e\x32@.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.Encoding\x12g\n\x0b\x63sv_options\x18\x06 \x01(\x0b\x32R.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions\x12T\n\x06schema\x18\x07 \x01(\x0b\x32\x44.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema\x12\x39\n\x15ignore_unknown_values\x18\n \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x1a\x9f\x02\n\nCsvOptions\x12\x35\n\x0f\x66ield_delimiter\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x15\x61llow_quoted_newlines\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\nquote_char\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x36\n\x11skip_leading_rows\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x35\n\x11\x61llow_jagged_rows\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.BoolValue"~\n\x06\x46ormat\x12\x16\n\x12\x46ORMAT_UNSPECIFIED\x10\x00\x12\x07\n\x03\x43SV\x10\x01\x12\x08\n\x04JSON\x10\x02\x12\x08\n\x04\x41VRO\x10\x03\x12\x0c\n\x08RECORDIO\x10\x04\x12\x0c\n\x08\x43OLUMNIO\x10\x05\x12\r\n\tCAPACITOR\x10\x06\x12\x0b\n\x07PARQUET\x10\x07\x12\x07\n\x03ORC\x10\x08">\n\x08\x45ncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0e\n\nISO_8859_1\x10\x01\x12\x08\n\x04UTF8\x10\x02"\x95\x01\n\x18UpdateTransferRunRequest\x12H\n\x0ctransfer_run\x18\x01 \x01(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\x80\x01\n\x1dLogTransferRunMessagesRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12Q\n\x11transfer_messages\x18\x02 \x03(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.TransferMessage"\xab\x01\n\x18StartBigQueryJobsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12N\n\rimported_data\x18\x02 \x03(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo\x12\x18\n\x10user_credentials\x18\x03 \x01(\x0c\x12\x17\n\x0fmax_parallelism\x18\x08 \x01(\x05" \n\x10\x46inishRunRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x90\x01\n!CreateDataSourceDefinitionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12[\n\x16\x64\x61ta_source_definition\x18\x02 \x01(\x0b\x32;.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition"\xb1\x01\n!UpdateDataSourceDefinitionRequest\x12[\n\x16\x64\x61ta_source_definition\x18\x01 \x01(\x0b\x32;.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"1\n!DeleteDataSourceDefinitionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t".\n\x1eGetDataSourceDefinitionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"Y\n ListDataSourceDefinitionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"\x9a\x01\n!ListDataSourceDefinitionsResponse\x12\\\n\x17\x64\x61ta_source_definitions\x18\x01 \x03(\x0b\x32;.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xcb\x02\n\x14\x44\x61taSourceDefinition\x12\x0c\n\x04name\x18\x15 \x01(\t\x12\x46\n\x0b\x64\x61ta_source\x18\x01 \x01(\x0b\x32\x31.google.cloud.bigquery.datatransfer.v1.DataSource\x12!\n\x19transfer_run_pubsub_topic\x18\r 
\x01(\t\x12\x32\n\x0frun_time_offset\x18\x10 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rsupport_email\x18\x16 \x01(\t\x12\x17\n\x0fservice_account\x18\x02 \x01(\t\x12\x10\n\x08\x64isabled\x18\x05 \x01(\x08\x12$\n\x1ctransfer_config_pubsub_topic\x18\x0c \x01(\t\x12\x1e\n\x16supported_location_ids\x18\x17 \x03(\t*[\n\x10WriteDisposition\x12!\n\x1dWRITE_DISPOSITION_UNSPECIFIED\x10\x00\x12\x12\n\x0eWRITE_TRUNCATE\x10\x01\x12\x10\n\x0cWRITE_APPEND\x10\x02\x32\x9d\x10\n\x11\x44\x61taSourceService\x12\xe7\x01\n\x11UpdateTransferRun\x12?.google.cloud.bigquery.datatransfer.v1.UpdateTransferRunRequest\x1a\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"]\x82\xd3\xe4\x93\x02W2G/v1/{transfer_run.name=projects/*/locations/*/transferConfigs/*/runs/*}:\x0ctransfer_run\x12\xc9\x01\n\x16LogTransferRunMessages\x12\x44.google.cloud.bigquery.datatransfer.v1.LogTransferRunMessagesRequest\x1a\x16.google.protobuf.Empty"Q\x82\xd3\xe4\x93\x02K"F/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:logMessages:\x01*\x12\xc5\x01\n\x11StartBigQueryJobs\x12?.google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest\x1a\x16.google.protobuf.Empty"W\x82\xd3\xe4\x93\x02Q"L/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:startBigQueryJobs:\x01*\x12\xad\x01\n\tFinishRun\x12\x37.google.cloud.bigquery.datatransfer.v1.FinishRunRequest\x1a\x16.google.protobuf.Empty"O\x82\xd3\xe4\x93\x02I"D/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:finishRun:\x01*\x12\xfe\x01\n\x1a\x43reateDataSourceDefinition\x12H.google.cloud.bigquery.datatransfer.v1.CreateDataSourceDefinitionRequest\x1a;.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition"Y\x82\xd3\xe4\x93\x02S"9/v1/{parent=projects/*/locations/*}/dataSourceDefinitions:\x16\x64\x61ta_source_definition\x12\x95\x02\n\x1aUpdateDataSourceDefinition\x12H.google.cloud.bigquery.datatransfer.v1.UpdateDataSourceDefinitionRequest\x1a;.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition"p\x82\xd3\xe4\x93\x02j2P/v1/{data_source_definition.name=projects/*/locations/*/dataSourceDefinitions/*}:\x16\x64\x61ta_source_definition\x12\xc1\x01\n\x1a\x44\x65leteDataSourceDefinition\x12H.google.cloud.bigquery.datatransfer.v1.DeleteDataSourceDefinitionRequest\x1a\x16.google.protobuf.Empty"A\x82\xd3\xe4\x93\x02;*9/v1/{name=projects/*/locations/*/dataSourceDefinitions/*}\x12\xe0\x01\n\x17GetDataSourceDefinition\x12\x45.google.cloud.bigquery.datatransfer.v1.GetDataSourceDefinitionRequest\x1a;.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition"A\x82\xd3\xe4\x93\x02;\x12\x39/v1/{name=projects/*/locations/*/dataSourceDefinitions/*}\x12\xf1\x01\n\x19ListDataSourceDefinitions\x12G.google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest\x1aH.google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsResponse"A\x82\xd3\xe4\x93\x02;\x12\x39/v1/{parent=projects/*/locations/*}/dataSourceDefinitions\x1a&\xca\x41#bigquerydatatransfer.googleapis.comB\xe1\x01\n)com.google.cloud.bigquery.datatransfer.v1B\x0f\x44\x61taSourceProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.DESCRIPTOR, - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - 
google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - -_WRITEDISPOSITION = _descriptor.EnumDescriptor( - name="WriteDisposition", - full_name="google.cloud.bigquery.datatransfer.v1.WriteDisposition", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="WRITE_DISPOSITION_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="WRITE_TRUNCATE", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="WRITE_APPEND", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3766, - serialized_end=3857, -) -_sym_db.RegisterEnumDescriptor(_WRITEDISPOSITION) - -WriteDisposition = enum_type_wrapper.EnumTypeWrapper(_WRITEDISPOSITION) -WRITE_DISPOSITION_UNSPECIFIED = 0 -WRITE_TRUNCATE = 1 -WRITE_APPEND = 2 - - -_IMPORTEDDATAINFO_FIELDSCHEMA_TYPE = _descriptor.EnumDescriptor( - name="Type", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.Type", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="STRING", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="INTEGER", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FLOAT", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RECORD", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="BYTES", index=5, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="BOOLEAN", index=6, number=6, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="TIMESTAMP", index=7, number=7, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DATE", index=8, number=8, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="TIME", index=9, number=9, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DATETIME", index=10, number=10, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="NUMERIC", index=11, number=11, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="GEOGRAPHY", index=12, number=12, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1007, - serialized_end=1184, -) -_sym_db.RegisterEnumDescriptor(_IMPORTEDDATAINFO_FIELDSCHEMA_TYPE) - -_IMPORTEDDATAINFO_FORMAT = _descriptor.EnumDescriptor( - name="Format", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.Format", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="FORMAT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="CSV", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="JSON", index=2, number=2, serialized_options=None, type=None - ), 
- _descriptor.EnumValueDescriptor( - name="AVRO", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RECORDIO", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="COLUMNIO", index=5, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CAPACITOR", index=6, number=6, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PARQUET", index=7, number=7, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ORC", index=8, number=8, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2075, - serialized_end=2201, -) -_sym_db.RegisterEnumDescriptor(_IMPORTEDDATAINFO_FORMAT) - -_IMPORTEDDATAINFO_ENCODING = _descriptor.EnumDescriptor( - name="Encoding", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.Encoding", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="ENCODING_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ISO_8859_1", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="UTF8", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2203, - serialized_end=2265, -) -_sym_db.RegisterEnumDescriptor(_IMPORTEDDATAINFO_ENCODING) - - -_IMPORTEDDATAINFO_FIELDSCHEMA = _descriptor.Descriptor( - name="FieldSchema", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="field_name", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.field_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="type", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="is_repeated", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.is_repeated", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.description", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="schema", - 
full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.schema", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_IMPORTEDDATAINFO_FIELDSCHEMA_TYPE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=755, - serialized_end=1184, -) - -_IMPORTEDDATAINFO_RECORDSCHEMA = _descriptor.Descriptor( - name="RecordSchema", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="fields", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema.fields", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1186, - serialized_end=1285, -) - -_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS = _descriptor.Descriptor( - name="CsvOptions", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="field_delimiter", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions.field_delimiter", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="allow_quoted_newlines", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions.allow_quoted_newlines", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="quote_char", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions.quote_char", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="skip_leading_rows", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions.skip_leading_rows", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="allow_jagged_rows", - 
full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions.allow_jagged_rows", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1786, - serialized_end=2073, -) - -_IMPORTEDDATAINFO_TABLEDEFINITION = _descriptor.Descriptor( - name="TableDefinition", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="table_id", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.table_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="source_uris", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.source_uris", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="format", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.format", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max_bad_records", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.max_bad_records", - index=3, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="encoding", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.encoding", - index=4, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="csv_options", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.csv_options", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="schema", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.schema", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - 
has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="ignore_unknown_values", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.ignore_unknown_values", - index=7, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1288, - serialized_end=2073, -) - -_IMPORTEDDATAINFO = _descriptor.Descriptor( - name="ImportedDataInfo", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sql", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.sql", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="destination_table_id", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.destination_table_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="destination_table_description", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.destination_table_description", - index=2, - number=10, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="table_defs", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.table_defs", - index=3, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="user_defined_functions", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.user_defined_functions", - index=4, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="write_disposition", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.write_disposition", - index=5, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _IMPORTEDDATAINFO_FIELDSCHEMA, - _IMPORTEDDATAINFO_RECORDSCHEMA, - _IMPORTEDDATAINFO_TABLEDEFINITION, - ], - enum_types=[_IMPORTEDDATAINFO_FORMAT, _IMPORTEDDATAINFO_ENCODING], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=443, - serialized_end=2265, -) - - -_UPDATETRANSFERRUNREQUEST = _descriptor.Descriptor( - name="UpdateTransferRunRequest", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferRunRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="transfer_run", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferRunRequest.transfer_run", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferRunRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2268, - serialized_end=2417, -) - - -_LOGTRANSFERRUNMESSAGESREQUEST = _descriptor.Descriptor( - name="LogTransferRunMessagesRequest", - full_name="google.cloud.bigquery.datatransfer.v1.LogTransferRunMessagesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.LogTransferRunMessagesRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="transfer_messages", - full_name="google.cloud.bigquery.datatransfer.v1.LogTransferRunMessagesRequest.transfer_messages", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2420, - serialized_end=2548, -) - - -_STARTBIGQUERYJOBSREQUEST = _descriptor.Descriptor( - name="StartBigQueryJobsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="imported_data", - full_name="google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest.imported_data", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="user_credentials", - full_name="google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest.user_credentials", - index=2, - number=3, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b(""), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max_parallelism", - full_name="google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest.max_parallelism", - index=3, - number=8, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2551, - serialized_end=2722, -) - - -_FINISHRUNREQUEST = _descriptor.Descriptor( - name="FinishRunRequest", - full_name="google.cloud.bigquery.datatransfer.v1.FinishRunRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.FinishRunRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2724, - serialized_end=2756, -) - - -_CREATEDATASOURCEDEFINITIONREQUEST = _descriptor.Descriptor( - name="CreateDataSourceDefinitionRequest", - full_name="google.cloud.bigquery.datatransfer.v1.CreateDataSourceDefinitionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.CreateDataSourceDefinitionRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="data_source_definition", - full_name="google.cloud.bigquery.datatransfer.v1.CreateDataSourceDefinitionRequest.data_source_definition", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2759, - serialized_end=2903, -) - - -_UPDATEDATASOURCEDEFINITIONREQUEST = _descriptor.Descriptor( - name="UpdateDataSourceDefinitionRequest", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateDataSourceDefinitionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="data_source_definition", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateDataSourceDefinitionRequest.data_source_definition", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateDataSourceDefinitionRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2906, - serialized_end=3083, -) - - -_DELETEDATASOURCEDEFINITIONREQUEST = _descriptor.Descriptor( - name="DeleteDataSourceDefinitionRequest", - full_name="google.cloud.bigquery.datatransfer.v1.DeleteDataSourceDefinitionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.DeleteDataSourceDefinitionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3085, - serialized_end=3134, -) - - -_GETDATASOURCEDEFINITIONREQUEST = _descriptor.Descriptor( - name="GetDataSourceDefinitionRequest", - full_name="google.cloud.bigquery.datatransfer.v1.GetDataSourceDefinitionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.GetDataSourceDefinitionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3136, - serialized_end=3182, -) - - -_LISTDATASOURCEDEFINITIONSREQUEST = _descriptor.Descriptor( - 
name="ListDataSourceDefinitionsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3184, - serialized_end=3273, -) - - -_LISTDATASOURCEDEFINITIONSRESPONSE = _descriptor.Descriptor( - name="ListDataSourceDefinitionsResponse", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="data_source_definitions", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsResponse.data_source_definitions", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3276, - serialized_end=3430, -) - - -_DATASOURCEDEFINITION = _descriptor.Descriptor( - name="DataSourceDefinition", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.name", - index=0, - number=21, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, 
- enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="data_source", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.data_source", - index=1, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="transfer_run_pubsub_topic", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.transfer_run_pubsub_topic", - index=2, - number=13, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="run_time_offset", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.run_time_offset", - index=3, - number=16, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="support_email", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.support_email", - index=4, - number=22, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_account", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.service_account", - index=5, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="disabled", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.disabled", - index=6, - number=5, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="transfer_config_pubsub_topic", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.transfer_config_pubsub_topic", - index=7, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="supported_location_ids", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.supported_location_ids", - index=8, - number=23, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3433, - serialized_end=3764, -) - -_IMPORTEDDATAINFO_FIELDSCHEMA.fields_by_name[ - "type" -].enum_type = _IMPORTEDDATAINFO_FIELDSCHEMA_TYPE -_IMPORTEDDATAINFO_FIELDSCHEMA.fields_by_name[ - "schema" -].message_type = _IMPORTEDDATAINFO_RECORDSCHEMA -_IMPORTEDDATAINFO_FIELDSCHEMA.containing_type = _IMPORTEDDATAINFO -_IMPORTEDDATAINFO_FIELDSCHEMA_TYPE.containing_type = _IMPORTEDDATAINFO_FIELDSCHEMA -_IMPORTEDDATAINFO_RECORDSCHEMA.fields_by_name[ - "fields" -].message_type = _IMPORTEDDATAINFO_FIELDSCHEMA -_IMPORTEDDATAINFO_RECORDSCHEMA.containing_type = _IMPORTEDDATAINFO -_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS.fields_by_name[ - "field_delimiter" -].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE -_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS.fields_by_name[ - "allow_quoted_newlines" -].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE -_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS.fields_by_name[ - "quote_char" -].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE -_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS.fields_by_name[ - "skip_leading_rows" -].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE -_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS.fields_by_name[ - "allow_jagged_rows" -].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE -_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS.containing_type = ( - _IMPORTEDDATAINFO_TABLEDEFINITION -) -_IMPORTEDDATAINFO_TABLEDEFINITION.fields_by_name[ - "format" -].enum_type = _IMPORTEDDATAINFO_FORMAT -_IMPORTEDDATAINFO_TABLEDEFINITION.fields_by_name[ - "encoding" -].enum_type = _IMPORTEDDATAINFO_ENCODING -_IMPORTEDDATAINFO_TABLEDEFINITION.fields_by_name[ - "csv_options" -].message_type = _IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS -_IMPORTEDDATAINFO_TABLEDEFINITION.fields_by_name[ - "schema" -].message_type = _IMPORTEDDATAINFO_RECORDSCHEMA -_IMPORTEDDATAINFO_TABLEDEFINITION.fields_by_name[ - "ignore_unknown_values" -].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE -_IMPORTEDDATAINFO_TABLEDEFINITION.containing_type = _IMPORTEDDATAINFO -_IMPORTEDDATAINFO.fields_by_name[ - "table_defs" -].message_type = _IMPORTEDDATAINFO_TABLEDEFINITION -_IMPORTEDDATAINFO.fields_by_name["write_disposition"].enum_type = _WRITEDISPOSITION -_IMPORTEDDATAINFO_FORMAT.containing_type = _IMPORTEDDATAINFO -_IMPORTEDDATAINFO_ENCODING.containing_type = _IMPORTEDDATAINFO -_UPDATETRANSFERRUNREQUEST.fields_by_name[ - "transfer_run" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN -) -_UPDATETRANSFERRUNREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LOGTRANSFERRUNMESSAGESREQUEST.fields_by_name[ - "transfer_messages" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERMESSAGE -) -_STARTBIGQUERYJOBSREQUEST.fields_by_name[ - "imported_data" -].message_type = _IMPORTEDDATAINFO -_CREATEDATASOURCEDEFINITIONREQUEST.fields_by_name[ - "data_source_definition" -].message_type = _DATASOURCEDEFINITION -_UPDATEDATASOURCEDEFINITIONREQUEST.fields_by_name[ - "data_source_definition" -].message_type = _DATASOURCEDEFINITION -_UPDATEDATASOURCEDEFINITIONREQUEST.fields_by_name[ - "update_mask" -].message_type = 
google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTDATASOURCEDEFINITIONSRESPONSE.fields_by_name[ - "data_source_definitions" -].message_type = _DATASOURCEDEFINITION -_DATASOURCEDEFINITION.fields_by_name[ - "data_source" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2._DATASOURCE -) -_DATASOURCEDEFINITION.fields_by_name[ - "run_time_offset" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -DESCRIPTOR.message_types_by_name["ImportedDataInfo"] = _IMPORTEDDATAINFO -DESCRIPTOR.message_types_by_name["UpdateTransferRunRequest"] = _UPDATETRANSFERRUNREQUEST -DESCRIPTOR.message_types_by_name[ - "LogTransferRunMessagesRequest" -] = _LOGTRANSFERRUNMESSAGESREQUEST -DESCRIPTOR.message_types_by_name["StartBigQueryJobsRequest"] = _STARTBIGQUERYJOBSREQUEST -DESCRIPTOR.message_types_by_name["FinishRunRequest"] = _FINISHRUNREQUEST -DESCRIPTOR.message_types_by_name[ - "CreateDataSourceDefinitionRequest" -] = _CREATEDATASOURCEDEFINITIONREQUEST -DESCRIPTOR.message_types_by_name[ - "UpdateDataSourceDefinitionRequest" -] = _UPDATEDATASOURCEDEFINITIONREQUEST -DESCRIPTOR.message_types_by_name[ - "DeleteDataSourceDefinitionRequest" -] = _DELETEDATASOURCEDEFINITIONREQUEST -DESCRIPTOR.message_types_by_name[ - "GetDataSourceDefinitionRequest" -] = _GETDATASOURCEDEFINITIONREQUEST -DESCRIPTOR.message_types_by_name[ - "ListDataSourceDefinitionsRequest" -] = _LISTDATASOURCEDEFINITIONSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListDataSourceDefinitionsResponse" -] = _LISTDATASOURCEDEFINITIONSRESPONSE -DESCRIPTOR.message_types_by_name["DataSourceDefinition"] = _DATASOURCEDEFINITION -DESCRIPTOR.enum_types_by_name["WriteDisposition"] = _WRITEDISPOSITION -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ImportedDataInfo = _reflection.GeneratedProtocolMessageType( - "ImportedDataInfo", - (_message.Message,), - dict( - FieldSchema=_reflection.GeneratedProtocolMessageType( - "FieldSchema", - (_message.Message,), - dict( - DESCRIPTOR=_IMPORTEDDATAINFO_FIELDSCHEMA, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Defines schema of a field in the imported data. - - - Attributes: - field_name: - Field name. Matches: [A-Za-z\_][A-Za-z\_0-9]{0,127} - type: - Field type - is_repeated: - Is field repeated. - description: - Description for this field. - schema: - Present iff type == RECORD. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema) - ), - ), - RecordSchema=_reflection.GeneratedProtocolMessageType( - "RecordSchema", - (_message.Message,), - dict( - DESCRIPTOR=_IMPORTEDDATAINFO_RECORDSCHEMA, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Describes schema of the data to be ingested. - - - Attributes: - fields: - One field per column in the record. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema) - ), - ), - TableDefinition=_reflection.GeneratedProtocolMessageType( - "TableDefinition", - (_message.Message,), - dict( - CsvOptions=_reflection.GeneratedProtocolMessageType( - "CsvOptions", - (_message.Message,), - dict( - DESCRIPTOR=_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""CSV specific options. - - - Attributes: - field_delimiter: - The delimiter. We currently restrict this to U+0001 to U+00FF - and apply additional constraints during validation. 
- allow_quoted_newlines: - Whether CSV files are allowed to have quoted newlines. If - quoted newlines are allowed, we can't split CSV files. - quote_char: - The quote character. We currently restrict this to U+0000 to - U+00FF and apply additional constraints during validation. Set - to ':raw-latex:`\0`' to indicate no quote is used. - skip_leading_rows: - Number of leading rows to skip. - allow_jagged_rows: - Accept rows that are missing trailing optional columns. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions) - ), - ), - DESCRIPTOR=_IMPORTEDDATAINFO_TABLEDEFINITION, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""External table definition. These tables can be referenced with 'name' in - the query and can be read just like any other table. - - - Attributes: - table_id: - BigQuery table\_id (required). This will be used to reference - this table in the query. - source_uris: - URIs for the data to be imported. All URIs must be from the - same storage system. - format: - Describes the format of the data in source\_uri. - max_bad_records: - Specify the maximum number of bad records that can be ignored. - If bad records exceed this threshold the query is aborted. - encoding: - Character encoding of the input when applicable (CSV, JSON). - Defaults to UTF8. - csv_options: - CSV specific options. - schema: - Optional schema for the data. When not specified for JSON and - CSV formats we will try to detect it automatically. - ignore_unknown_values: - Indicates if extra values that are not represented in the - table schema is allowed. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition) - ), - ), - DESCRIPTOR=_IMPORTEDDATAINFO, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Describes data which should be imported. - - - Attributes: - sql: - SQL query to run. When empty, API checks that there is only - one table\_def specified and loads this table. Only Standard - SQL queries are accepted. Legacy SQL is not allowed. - destination_table_id: - Table where results should be written. - destination_table_description: - The description of a destination table. This can be several - sentences or paragraphs describing the table contents in - detail. - table_defs: - When used WITHOUT the "sql" parameter, describes the schema of - the destination table. When used WITH the "sql" parameter, - describes tables with data stored outside of BigQuery. - user_defined_functions: - Inline code for User-defined function resources. Ignored when - "sql" parameter is empty. - write_disposition: - Specifies the action if the destination table already exists. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ImportedDataInfo) - ), -) -_sym_db.RegisterMessage(ImportedDataInfo) -_sym_db.RegisterMessage(ImportedDataInfo.FieldSchema) -_sym_db.RegisterMessage(ImportedDataInfo.RecordSchema) -_sym_db.RegisterMessage(ImportedDataInfo.TableDefinition) -_sym_db.RegisterMessage(ImportedDataInfo.TableDefinition.CsvOptions) - -UpdateTransferRunRequest = _reflection.GeneratedProtocolMessageType( - "UpdateTransferRunRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATETRANSFERRUNREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""A request to update a transfer run. 
- - - Attributes: - transfer_run: - Run name must be set and correspond to an already existing - run. Only state, error\_status, and data\_version fields will - be updated. All other fields will be ignored. - update_mask: - Required list of fields to be updated in this request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.UpdateTransferRunRequest) - ), -) -_sym_db.RegisterMessage(UpdateTransferRunRequest) - -LogTransferRunMessagesRequest = _reflection.GeneratedProtocolMessageType( - "LogTransferRunMessagesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LOGTRANSFERRUNMESSAGESREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""A request to add transfer status messages to the run. - - - Attributes: - name: - Name of the resource in the form: "projects/{project\_id}/loca - tions/{location\_id}/transferConfigs/{config\_id}/runs/{run\_i - d}" - transfer_messages: - Messages to append. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.LogTransferRunMessagesRequest) - ), -) -_sym_db.RegisterMessage(LogTransferRunMessagesRequest) - -StartBigQueryJobsRequest = _reflection.GeneratedProtocolMessageType( - "StartBigQueryJobsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_STARTBIGQUERYJOBSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""A request to start and monitor a BigQuery load job. - - - Attributes: - name: - Name of the resource in the form: "projects/{project\_id}/loca - tions/{location\_id}/transferConfigs/{config\_id}/runs/{run\_i - d}" - imported_data: - Import jobs which should be started and monitored. - user_credentials: - User credentials which should be used to start/monitor - BigQuery jobs. If not specified, then jobs are started using - data source service account credentials. This may be OAuth - token or JWT token. - max_parallelism: - The number of BQ Jobs that can run in parallel. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest) - ), -) -_sym_db.RegisterMessage(StartBigQueryJobsRequest) - -FinishRunRequest = _reflection.GeneratedProtocolMessageType( - "FinishRunRequest", - (_message.Message,), - dict( - DESCRIPTOR=_FINISHRUNREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""A request to finish a run. - - - Attributes: - name: - Name of the resource in the form: "projects/{project\_id}/loca - tions/{location\_id}/transferConfigs/{config\_id}/runs/{run\_i - d}" - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.FinishRunRequest) - ), -) -_sym_db.RegisterMessage(FinishRunRequest) - -CreateDataSourceDefinitionRequest = _reflection.GeneratedProtocolMessageType( - "CreateDataSourceDefinitionRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATEDATASOURCEDEFINITIONREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Represents the request of the CreateDataSourceDefinition method. - - - Attributes: - parent: - The BigQuery project id for which data source definition is - associated. Must be in the form: - ``projects/{project_id}/locations/{location_id}`` - data_source_definition: - Data source definition. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CreateDataSourceDefinitionRequest) - ), -) -_sym_db.RegisterMessage(CreateDataSourceDefinitionRequest) - -UpdateDataSourceDefinitionRequest = _reflection.GeneratedProtocolMessageType( - "UpdateDataSourceDefinitionRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATEDATASOURCEDEFINITIONREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Represents the request of the UpdateDataSourceDefinition method. - - - Attributes: - data_source_definition: - Data source definition. - update_mask: - Update field mask. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.UpdateDataSourceDefinitionRequest) - ), -) -_sym_db.RegisterMessage(UpdateDataSourceDefinitionRequest) - -DeleteDataSourceDefinitionRequest = _reflection.GeneratedProtocolMessageType( - "DeleteDataSourceDefinitionRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEDATASOURCEDEFINITIONREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Represents the request of the DeleteDataSourceDefinition method. All - transfer configs associated with the data source must be deleted first, - before the data source can be deleted. - - - Attributes: - name: - The field will contain name of the resource requested, for - example: ``projects/{project_id}/locations/{location_id}/dataS - ourceDefinitions/{data_source_id}`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DeleteDataSourceDefinitionRequest) - ), -) -_sym_db.RegisterMessage(DeleteDataSourceDefinitionRequest) - -GetDataSourceDefinitionRequest = _reflection.GeneratedProtocolMessageType( - "GetDataSourceDefinitionRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETDATASOURCEDEFINITIONREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Represents the request of the GetDataSourceDefinition method. - - - Attributes: - name: - The field will contain name of the resource requested. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetDataSourceDefinitionRequest) - ), -) -_sym_db.RegisterMessage(GetDataSourceDefinitionRequest) - -ListDataSourceDefinitionsRequest = _reflection.GeneratedProtocolMessageType( - "ListDataSourceDefinitionsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTDATASOURCEDEFINITIONSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Represents the request of the ListDataSourceDefinitions method. - - - Attributes: - parent: - The BigQuery project id for which data sources should be - returned. Must be in the form: - ``projects/{project_id}/locations/{location_id}`` - page_token: - Pagination token, which can be used to request a specific page - of ``ListDataSourceDefinitionsRequest`` list results. For - multiple-page results, ``ListDataSourceDefinitionsResponse`` - outputs a ``next_page`` token, which can be used as the - ``page_token`` value to request the next page of the list - results. - page_size: - Page size. The default page size is the maximum value of 1000 - results. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest) - ), -) -_sym_db.RegisterMessage(ListDataSourceDefinitionsRequest) - -ListDataSourceDefinitionsResponse = _reflection.GeneratedProtocolMessageType( - "ListDataSourceDefinitionsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTDATASOURCEDEFINITIONSRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Returns a list of supported data source definitions. - - - Attributes: - data_source_definitions: - List of supported data source definitions. - next_page_token: - Output only. The next-pagination token. For multiple-page list - results, this token can be used as the - ``ListDataSourceDefinitionsRequest.page_token`` to request the - next page of the list results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsResponse) - ), -) -_sym_db.RegisterMessage(ListDataSourceDefinitionsResponse) - -DataSourceDefinition = _reflection.GeneratedProtocolMessageType( - "DataSourceDefinition", - (_message.Message,), - dict( - DESCRIPTOR=_DATASOURCEDEFINITION, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Represents the data source definition. - - - Attributes: - name: - The resource name of the data source definition. Data source - definition names have the form ``projects/{project_id}/locatio - ns/{location}/dataSourceDefinitions/{data_source_id}``. - data_source: - Data source metadata. - transfer_run_pubsub_topic: - The Pub/Sub topic to be used for broadcasting a message when a - transfer run is created. Both this topic and - transfer\_config\_pubsub\_topic can be set to a custom topic. - By default, both topics are auto-generated if none of them is - provided when creating the definition. However, if one topic - is manually set, the other topic has to be manually set as - well. The only difference is that transfer\_run\_pubsub\_topic - must be a non-empty Pub/Sub topic, but - transfer\_config\_pubsub\_topic can be set to empty. The - comments about "{location}" for - transfer\_config\_pubsub\_topic apply here too. - run_time_offset: - Duration which should be added to schedule\_time to calculate - run\_time when job is scheduled. Only applicable for - automatically scheduled transfer runs. Used to start a run - early on a data source that supports continuous data refresh - to compensate for unknown timezone offsets. Use a negative - number to start a run late for data sources not supporting - continuous data refresh. - support_email: - Support e-mail address of the OAuth client's Brand, which - contains the consent screen data. - service_account: - When service account is specified, BigQuery will share created - dataset with the given service account. Also, this service - account will be eligible to perform status updates and message - logging for data transfer runs for the corresponding - data\_source\_id. - disabled: - Is data source disabled? If true, data\_source is not visible. - API will also stop returning any data transfer configs and/or - runs associated with the data source. This setting has higher - priority than whitelisted\_project\_ids. - transfer_config_pubsub_topic: - The Pub/Sub topic to use for broadcasting a message for - transfer config. If empty, a message will not be broadcasted. - Both this topic and transfer\_run\_pubsub\_topic are auto- - generated if none of them is provided when creating the - definition. 
It is recommended to provide - transfer\_config\_pubsub\_topic if a user-owned - transfer\_run\_pubsub\_topic is provided. Otherwise, it will - be set to empty. If "{location}" is found in the value, then - that means, data source wants to handle message separately for - datasets in different regions. We will replace {location} with - the actual dataset location, as the actual topic name. For - example, projects/connector/topics/scheduler-{location} could - become projects/connector/topics/scheduler-us. If "{location}" - is not found, then we will use the input value as topic name. - supported_location_ids: - Supported location\_ids used for deciding in which locations - Pub/Sub topics need to be created. If custom Pub/Sub topics - are used and they contains '{location}', the location\_ids - will be used for validating the topics by replacing the - '{location}' with the individual location in the list. The - valid values are the "location\_id" field of the response of - ``GET https://bigquerydatatransfer.googleapis.com/v1/{name=pro - jects/*}/locations`` In addition, if the data source needs to - support all available regions, supported\_location\_ids can be - set to "global" (a single string element). When "global" is - specified: 1) the data source implementation is supposed to - stage the data in proper region of the destination dataset; 2) - Data source developer should be aware of the implications - (e.g., network traffic latency, potential charge associated - with cross-region traffic, etc.) of supporting the "global" - region; - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DataSourceDefinition) - ), -) -_sym_db.RegisterMessage(DataSourceDefinition) - - -DESCRIPTOR._options = None - -_DATASOURCESERVICE = _descriptor.ServiceDescriptor( - name="DataSourceService", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService", - file=DESCRIPTOR, - index=0, - serialized_options=_b("\312A#bigquerydatatransfer.googleapis.com"), - serialized_start=3860, - serialized_end=5937, - methods=[ - _descriptor.MethodDescriptor( - name="UpdateTransferRun", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.UpdateTransferRun", - index=0, - containing_service=None, - input_type=_UPDATETRANSFERRUNREQUEST, - output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN, - serialized_options=_b( - "\202\323\344\223\002W2G/v1/{transfer_run.name=projects/*/locations/*/transferConfigs/*/runs/*}:\014transfer_run" - ), - ), - _descriptor.MethodDescriptor( - name="LogTransferRunMessages", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.LogTransferRunMessages", - index=1, - containing_service=None, - input_type=_LOGTRANSFERRUNMESSAGESREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - '\202\323\344\223\002K"F/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:logMessages:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="StartBigQueryJobs", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.StartBigQueryJobs", - index=2, - containing_service=None, - input_type=_STARTBIGQUERYJOBSREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - '\202\323\344\223\002Q"L/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:startBigQueryJobs:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="FinishRun", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.FinishRun", 
- index=3, - containing_service=None, - input_type=_FINISHRUNREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - '\202\323\344\223\002I"D/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:finishRun:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="CreateDataSourceDefinition", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.CreateDataSourceDefinition", - index=4, - containing_service=None, - input_type=_CREATEDATASOURCEDEFINITIONREQUEST, - output_type=_DATASOURCEDEFINITION, - serialized_options=_b( - '\202\323\344\223\002S"9/v1/{parent=projects/*/locations/*}/dataSourceDefinitions:\026data_source_definition' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateDataSourceDefinition", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.UpdateDataSourceDefinition", - index=5, - containing_service=None, - input_type=_UPDATEDATASOURCEDEFINITIONREQUEST, - output_type=_DATASOURCEDEFINITION, - serialized_options=_b( - "\202\323\344\223\002j2P/v1/{data_source_definition.name=projects/*/locations/*/dataSourceDefinitions/*}:\026data_source_definition" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteDataSourceDefinition", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.DeleteDataSourceDefinition", - index=6, - containing_service=None, - input_type=_DELETEDATASOURCEDEFINITIONREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002;*9/v1/{name=projects/*/locations/*/dataSourceDefinitions/*}" - ), - ), - _descriptor.MethodDescriptor( - name="GetDataSourceDefinition", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.GetDataSourceDefinition", - index=7, - containing_service=None, - input_type=_GETDATASOURCEDEFINITIONREQUEST, - output_type=_DATASOURCEDEFINITION, - serialized_options=_b( - "\202\323\344\223\002;\0229/v1/{name=projects/*/locations/*/dataSourceDefinitions/*}" - ), - ), - _descriptor.MethodDescriptor( - name="ListDataSourceDefinitions", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.ListDataSourceDefinitions", - index=8, - containing_service=None, - input_type=_LISTDATASOURCEDEFINITIONSREQUEST, - output_type=_LISTDATASOURCEDEFINITIONSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002;\0229/v1/{parent=projects/*/locations/*}/dataSourceDefinitions" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_DATASOURCESERVICE) - -DESCRIPTOR.services_by_name["DataSourceService"] = _DATASOURCESERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2_grpc.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2_grpc.py deleted file mode 100644 index 136c84c3623a..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2_grpc.py +++ /dev/null @@ -1,229 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-import grpc - -from google.cloud.bigquery.datatransfer_v1.proto import ( - datasource_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2, -) -from google.cloud.bigquery.datatransfer_v1.proto import ( - transfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class DataSourceServiceStub(object): - """The Google BigQuery Data Transfer API allows BigQuery users to - configure transfer of their data from other Google Products into BigQuery. - This service exposes methods that should be used by data source backend. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.UpdateTransferRun = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/UpdateTransferRun", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.UpdateTransferRunRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferRun.FromString, - ) - self.LogTransferRunMessages = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/LogTransferRunMessages", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.LogTransferRunMessagesRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.StartBigQueryJobs = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/StartBigQueryJobs", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.StartBigQueryJobsRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.FinishRun = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/FinishRun", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.FinishRunRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.CreateDataSourceDefinition = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/CreateDataSourceDefinition", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.CreateDataSourceDefinitionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DataSourceDefinition.FromString, - ) - self.UpdateDataSourceDefinition = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/UpdateDataSourceDefinition", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.UpdateDataSourceDefinitionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DataSourceDefinition.FromString, - ) - self.DeleteDataSourceDefinition = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/DeleteDataSourceDefinition", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DeleteDataSourceDefinitionRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.GetDataSourceDefinition = 
channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/GetDataSourceDefinition", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.GetDataSourceDefinitionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DataSourceDefinition.FromString, - ) - self.ListDataSourceDefinitions = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/ListDataSourceDefinitions", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.ListDataSourceDefinitionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.ListDataSourceDefinitionsResponse.FromString, - ) - - -class DataSourceServiceServicer(object): - """The Google BigQuery Data Transfer API allows BigQuery users to - configure transfer of their data from other Google Products into BigQuery. - This service exposes methods that should be used by data source backend. - """ - - def UpdateTransferRun(self, request, context): - """Update a transfer run. If successful, resets - data_source.update_deadline_seconds timer. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def LogTransferRunMessages(self, request, context): - """Log messages for a transfer run. If successful (at least 1 message), resets - data_source.update_deadline_seconds timer. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def StartBigQueryJobs(self, request, context): - """Notify the Data Transfer Service that data is ready for loading. - The Data Transfer Service will start and monitor multiple BigQuery Load - jobs for a transfer run. Monitored jobs will be automatically retried - and produce log messages when starting and finishing a job. - Can be called multiple times for the same transfer run. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def FinishRun(self, request, context): - """Notify the Data Transfer Service that the data source is done processing - the run. No more status updates or requests to start/monitor jobs will be - accepted. The run will be finalized by the Data Transfer Service when all - monitored jobs are completed. - Does not need to be called if the run is set to FAILED. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateDataSourceDefinition(self, request, context): - """Creates a data source definition. Calling this method will automatically - use your credentials to create the following Google Cloud resources in - YOUR Google Cloud project. - 1. OAuth client - 2. Pub/Sub Topics and Subscriptions in each supported_location_ids. e.g., - projects/{project_id}/{topics|subscriptions}/bigquerydatatransfer.{data_source_id}.{location_id}.run - The field data_source.client_id should be left empty in the input request, - as the API will create a new OAuth client on behalf of the caller. 
On the - other hand data_source.scopes usually need to be set when there are OAuth - scopes that need to be granted by end users. - 3. We need a longer deadline due to the 60 seconds SLO from Pub/Sub admin - Operations. This also applies to update and delete data source definition. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateDataSourceDefinition(self, request, context): - """Updates an existing data source definition. If changing - supported_location_ids, triggers same effects as mentioned in "Create a - data source definition." - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteDataSourceDefinition(self, request, context): - """Deletes a data source definition, all of the transfer configs associated - with this data source definition (if any) must be deleted first by the user - in ALL regions, in order to delete the data source definition. - This method is primarily meant for deleting data sources created during - testing stage. - If the data source is referenced by transfer configs in the region - specified in the request URL, the method will fail immediately. If in the - current region (e.g., US) it's not used by any transfer configs, but in - another region (e.g., EU) it is, then although the method will succeed in - region US, but it will fail when the deletion operation is replicated to - region EU. And eventually, the system will replicate the data source - definition back from EU to US, in order to bring all regions to - consistency. The final effect is that the data source appears to be - 'undeleted' in the US region. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetDataSourceDefinition(self, request, context): - """Retrieves an existing data source definition. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListDataSourceDefinitions(self, request, context): - """Lists supported data source definitions. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_DataSourceServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "UpdateTransferRun": grpc.unary_unary_rpc_method_handler( - servicer.UpdateTransferRun, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.UpdateTransferRunRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferRun.SerializeToString, - ), - "LogTransferRunMessages": grpc.unary_unary_rpc_method_handler( - servicer.LogTransferRunMessages, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.LogTransferRunMessagesRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "StartBigQueryJobs": grpc.unary_unary_rpc_method_handler( - servicer.StartBigQueryJobs, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.StartBigQueryJobsRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "FinishRun": grpc.unary_unary_rpc_method_handler( - servicer.FinishRun, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.FinishRunRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "CreateDataSourceDefinition": grpc.unary_unary_rpc_method_handler( - servicer.CreateDataSourceDefinition, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.CreateDataSourceDefinitionRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DataSourceDefinition.SerializeToString, - ), - "UpdateDataSourceDefinition": grpc.unary_unary_rpc_method_handler( - servicer.UpdateDataSourceDefinition, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.UpdateDataSourceDefinitionRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DataSourceDefinition.SerializeToString, - ), - "DeleteDataSourceDefinition": grpc.unary_unary_rpc_method_handler( - servicer.DeleteDataSourceDefinition, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DeleteDataSourceDefinitionRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "GetDataSourceDefinition": grpc.unary_unary_rpc_method_handler( - servicer.GetDataSourceDefinition, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.GetDataSourceDefinitionRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DataSourceDefinition.SerializeToString, - ), - "ListDataSourceDefinitions": grpc.unary_unary_rpc_method_handler( - servicer.ListDataSourceDefinitions, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.ListDataSourceDefinitionsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.ListDataSourceDefinitionsResponse.SerializeToString, - ), - } - 
generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.bigquery.datatransfer.v1.DataSourceService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto deleted file mode 100644 index 37accec99ff0..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto +++ /dev/null @@ -1,838 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.bigquery.datatransfer.v1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/datatransfer/v1/transfer.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/wrappers.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer"; -option java_multiple_files = true; -option java_outer_classname = "DataTransferProto"; -option java_package = "com.google.cloud.bigquery.datatransfer.v1"; -option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1"; - -// The Google BigQuery Data Transfer Service API enables BigQuery users to -// configure the transfer of their data from other Google Products into -// BigQuery. This service contains methods that are end user exposed. It backs -// up the frontend. -service DataTransferService { - option (google.api.default_host) = "bigquerydatatransfer.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Retrieves a supported data source and returns its settings, - // which can be used for UI rendering. - rpc GetDataSource(GetDataSourceRequest) returns (DataSource) { - option (google.api.http) = { - get: "/v1/{name=projects/*/locations/*/dataSources/*}" - additional_bindings { - get: "/v1/{name=projects/*/dataSources/*}" - } - }; - option (google.api.method_signature) = "name"; - } - - // Lists supported data sources and returns their settings, - // which can be used for UI rendering. - rpc ListDataSources(ListDataSourcesRequest) returns (ListDataSourcesResponse) { - option (google.api.http) = { - get: "/v1/{parent=projects/*/locations/*}/dataSources" - additional_bindings { - get: "/v1/{parent=projects/*}/dataSources" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Creates a new data transfer configuration. 
- rpc CreateTransferConfig(CreateTransferConfigRequest) returns (TransferConfig) { - option (google.api.http) = { - post: "/v1/{parent=projects/*/locations/*}/transferConfigs" - body: "transfer_config" - additional_bindings { - post: "/v1/{parent=projects/*}/transferConfigs" - body: "transfer_config" - } - }; - option (google.api.method_signature) = "parent,transfer_config"; - } - - // Updates a data transfer configuration. - // All fields must be set, even if they are not updated. - rpc UpdateTransferConfig(UpdateTransferConfigRequest) returns (TransferConfig) { - option (google.api.http) = { - patch: "/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}" - body: "transfer_config" - additional_bindings { - patch: "/v1/{transfer_config.name=projects/*/transferConfigs/*}" - body: "transfer_config" - } - }; - option (google.api.method_signature) = "transfer_config,update_mask"; - } - - // Deletes a data transfer configuration, - // including any associated transfer runs and logs. - rpc DeleteTransferConfig(DeleteTransferConfigRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/{name=projects/*/locations/*/transferConfigs/*}" - additional_bindings { - delete: "/v1/{name=projects/*/transferConfigs/*}" - } - }; - option (google.api.method_signature) = "name"; - } - - // Returns information about a data transfer config. - rpc GetTransferConfig(GetTransferConfigRequest) returns (TransferConfig) { - option (google.api.http) = { - get: "/v1/{name=projects/*/locations/*/transferConfigs/*}" - additional_bindings { - get: "/v1/{name=projects/*/transferConfigs/*}" - } - }; - option (google.api.method_signature) = "name"; - } - - // Returns information about all data transfers in the project. - rpc ListTransferConfigs(ListTransferConfigsRequest) returns (ListTransferConfigsResponse) { - option (google.api.http) = { - get: "/v1/{parent=projects/*/locations/*}/transferConfigs" - additional_bindings { - get: "/v1/{parent=projects/*}/transferConfigs" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Creates transfer runs for a time range [start_time, end_time]. - // For each date - or whatever granularity the data source supports - in the - // range, one transfer run is created. - // Note that runs are created per UTC time in the time range. - // DEPRECATED: use StartManualTransferRuns instead. - rpc ScheduleTransferRuns(ScheduleTransferRunsRequest) returns (ScheduleTransferRunsResponse) { - option deprecated = true; - option (google.api.http) = { - post: "/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns" - body: "*" - additional_bindings { - post: "/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns" - body: "*" - } - }; - option (google.api.method_signature) = "parent,start_time,end_time"; - } - - // Start manual transfer runs to be executed now with schedule_time equal to - // current time. The transfer runs can be created for a time range where the - // run_time is between start_time (inclusive) and end_time (exclusive), or for - // a specific run_time. - rpc StartManualTransferRuns(StartManualTransferRunsRequest) returns (StartManualTransferRunsResponse) { - option (google.api.http) = { - post: "/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns" - body: "*" - additional_bindings { - post: "/v1/{parent=projects/*/transferConfigs/*}:startManualRuns" - body: "*" - } - }; - } - - // Returns information about the particular transfer run. 
- rpc GetTransferRun(GetTransferRunRequest) returns (TransferRun) { - option (google.api.http) = { - get: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}" - additional_bindings { - get: "/v1/{name=projects/*/transferConfigs/*/runs/*}" - } - }; - option (google.api.method_signature) = "name"; - } - - // Deletes the specified transfer run. - rpc DeleteTransferRun(DeleteTransferRunRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}" - additional_bindings { - delete: "/v1/{name=projects/*/transferConfigs/*/runs/*}" - } - }; - option (google.api.method_signature) = "name"; - } - - // Returns information about running and completed jobs. - rpc ListTransferRuns(ListTransferRunsRequest) returns (ListTransferRunsResponse) { - option (google.api.http) = { - get: "/v1/{parent=projects/*/locations/*/transferConfigs/*}/runs" - additional_bindings { - get: "/v1/{parent=projects/*/transferConfigs/*}/runs" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Returns user facing log messages for the data transfer run. - rpc ListTransferLogs(ListTransferLogsRequest) returns (ListTransferLogsResponse) { - option (google.api.http) = { - get: "/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogs" - additional_bindings { - get: "/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Returns true if valid credentials exist for the given data source and - // requesting user. - // Some data sources don't support service accounts, so we need to talk to - // them on behalf of the end user. This API just checks whether we have an OAuth - // token for the particular user, which is a pre-requisite before the user can - // create a transfer config. - rpc CheckValidCreds(CheckValidCredsRequest) returns (CheckValidCredsResponse) { - option (google.api.http) = { - post: "/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds" - body: "*" - additional_bindings { - post: "/v1/{name=projects/*/dataSources/*}:checkValidCreds" - body: "*" - } - }; - option (google.api.method_signature) = "name"; - } -} - -// Represents a data source parameter with validation rules, so that -// parameters can be rendered in the UI. These parameters are given to us by -// supported data sources, and include all needed information for rendering -// and validation. -// Thus, whoever uses this api can decide to generate either generic ui, -// or custom data source specific forms. -message DataSourceParameter { - // Parameter type. - enum Type { - // Type unspecified. - TYPE_UNSPECIFIED = 0; - - // String parameter. - STRING = 1; - - // Integer parameter (64-bits). - // Will be serialized to json as string. - INTEGER = 2; - - // Double precision floating point parameter. - DOUBLE = 3; - - // Boolean parameter. - BOOLEAN = 4; - - // Deprecated. This field has no effect. - RECORD = 5; - - // Page ID for a Google+ Page. - PLUS_PAGE = 6; - } - - // Parameter identifier. - string param_id = 1; - - // Parameter display name in the user interface. - string display_name = 2; - - // Parameter description. - string description = 3; - - // Parameter type. - Type type = 4; - - // Is parameter required. - bool required = 5; - - // Deprecated. This field has no effect. - bool repeated = 6; - - // Regular expression which can be used for parameter validation. - string validation_regex = 7; - - // All possible values for the parameter.
- repeated string allowed_values = 8; - - // For integer and double values specifies minimum allowed value. - google.protobuf.DoubleValue min_value = 9; - - // For integer and double values specifies maximum allowed value. - google.protobuf.DoubleValue max_value = 10; - - // Deprecated. This field has no effect. - repeated DataSourceParameter fields = 11; - - // Description of the requirements for this field, in case the user input does - // not fulfill the regex pattern or min/max values. - string validation_description = 12; - - // URL to a help document to further explain the naming requirements. - string validation_help_url = 13; - - // Cannot be changed after initial creation. - bool immutable = 14; - - // Deprecated. This field has no effect. - bool recurse = 15; - - // If true, it should not be used in new transfers, and it should not be - // visible to users. - bool deprecated = 20; -} - -// Represents data source metadata. Metadata is sufficient to -// render UI and request proper OAuth tokens. -message DataSource { - option (google.api.resource) = { - type: "bigquerydatatransfer.googleapis.com/DataSource" - pattern: "projects/{project}/dataSources/{data_source}" - pattern: "projects/{project}/locations/{location}/dataSources/{data_source}" - }; - - // The type of authorization needed for this data source. - enum AuthorizationType { - // Type unspecified. - AUTHORIZATION_TYPE_UNSPECIFIED = 0; - - // Use OAuth 2 authorization codes that can be exchanged - // for a refresh token on the backend. - AUTHORIZATION_CODE = 1; - - // Return an authorization code for a given Google+ page that can then be - // exchanged for a refresh token on the backend. - GOOGLE_PLUS_AUTHORIZATION_CODE = 2; - } - - // Represents how the data source supports data auto refresh. - enum DataRefreshType { - // The data source won't support data auto refresh, which is the default value. - DATA_REFRESH_TYPE_UNSPECIFIED = 0; - - // The data source supports data auto refresh, and runs will be scheduled - // for the past few days. Does not allow custom values to be set for each - // transfer config. - SLIDING_WINDOW = 1; - - // The data source supports data auto refresh, and runs will be scheduled - // for the past few days. Allows custom values to be set for each transfer - // config. - CUSTOM_SLIDING_WINDOW = 2; - } - - // Output only. Data source resource name. - string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Data source id. - string data_source_id = 2; - - // User friendly data source name. - string display_name = 3; - - // User friendly data source description string. - string description = 4; - - // Data source client id which should be used to receive refresh token. - string client_id = 5; - - // API auth scopes for which refresh token needs to be obtained. These are - // scopes needed by a data source to prepare data and ingest them into - // BigQuery, e.g., https://www.googleapis.com/auth/bigquery - repeated string scopes = 6; - - // Deprecated. This field has no effect. - TransferType transfer_type = 7 [deprecated = true]; - - // Deprecated. This field has no effect. - bool supports_multiple_transfers = 8 [deprecated = true]; - - // The number of seconds to wait for an update from the data source - // before the Data Transfer Service marks the transfer as FAILED. - int32 update_deadline_seconds = 9; - - // Default data transfer schedule.
- // Examples of valid schedules include: - // `1st,3rd monday of month 15:30`, - // `every wed,fri of jan,jun 13:15`, and - // `first sunday of quarter 00:00`. - string default_schedule = 10; - - // Specifies whether the data source supports a user defined schedule, or - // operates on the default schedule. - // When set to `true`, the user can override the default schedule. - bool supports_custom_schedule = 11; - - // Data source parameters. - repeated DataSourceParameter parameters = 12; - - // URL for the help document for this data source. - string help_url = 13; - - // Indicates the type of authorization. - AuthorizationType authorization_type = 14; - - // Specifies whether the data source supports automatic data refresh for the - // past few days, and how it's supported. - // For some data sources, data might not be complete until a few days later, - // so it's useful to refresh data automatically. - DataRefreshType data_refresh_type = 15; - - // Default data refresh window in days. - // Only meaningful when `data_refresh_type` = `SLIDING_WINDOW`. - int32 default_data_refresh_window_days = 16; - - // Disables backfilling and manual run scheduling - // for the data source. - bool manual_runs_disabled = 17; - - // The minimum interval for the scheduler to schedule runs. - google.protobuf.Duration minimum_schedule_interval = 18; -} - -// A request to get data source info. -message GetDataSourceRequest { - // Required. The field will contain name of the resource requested, for example: - // `projects/{project_id}/dataSources/{data_source_id}` or - // `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerydatatransfer.googleapis.com/DataSource" - } - ]; -} - -// Request to list supported data sources and their data transfer settings. -message ListDataSourcesRequest { - // Required. The BigQuery project id for which data sources should be returned. - // Must be in the form: `projects/{project_id}` or - // `projects/{project_id}/locations/{location_id}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "bigquerydatatransfer.googleapis.com/DataSource" - } - ]; - - // Pagination token, which can be used to request a specific page - // of `ListDataSourcesRequest` list results. For multiple-page - // results, `ListDataSourcesResponse` outputs - // a `next_page` token, which can be used as the - // `page_token` value to request the next page of list results. - string page_token = 3; - - // Page size. The default page size is the maximum value of 1000 results. - int32 page_size = 4; -} - -// Returns list of supported data sources and their metadata. -message ListDataSourcesResponse { - // List of supported data sources and their transfer settings. - repeated DataSource data_sources = 1; - - // Output only. The next-pagination token. For multiple-page list results, - // this token can be used as the - // `ListDataSourcesRequest.page_token` - // to request the next page of list results. - string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A request to create a data transfer configuration. If new credentials are -// needed for this transfer configuration, an authorization code must be -// provided. If an authorization code is provided, the transfer configuration -// will be associated with the user id corresponding to the -// authorization code.
Otherwise, the transfer configuration will be associated -// with the calling user. -message CreateTransferConfigRequest { - // Required. The BigQuery project id where the transfer configuration should be created. - // Must be in the format projects/{project_id}/locations/{location_id} or - // projects/{project_id}. If the specified location and the location of the - // destination bigquery dataset do not match - the request will fail. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "bigquerydatatransfer.googleapis.com/TransferConfig" - } - ]; - - // Required. Data transfer configuration to create. - TransferConfig transfer_config = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional OAuth2 authorization code to use with this transfer configuration. - // This is required if new credentials are needed, as indicated by - // `CheckValidCreds`. - // In order to obtain authorization_code, please make a - // request to - // https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri= - // - // * client_id should be OAuth client_id of BigQuery DTS API for the given - // data source returned by ListDataSources method. - // * data_source_scopes are the scopes returned by ListDataSources method. - // * redirect_uri is an optional parameter. If not specified, then - // authorization code is posted to the opener of authorization flow window. - // Otherwise it will be sent to the redirect uri. A special value of - // urn:ietf:wg:oauth:2.0:oob means that authorization code should be - // returned in the title bar of the browser, with the page text prompting - // the user to copy the code and paste it in the application. - string authorization_code = 3; - - // Optional version info. If users want to find a very recent access token, - // that is, immediately after approving access, users have to set the - // version_info claim in the token request. To obtain the version_info, users - // must use the "none+gsession" response type, which returns a - // version_info in the authorization response. That version_info should then - // be put in a JWT claim in the token request. - string version_info = 5; - - // Optional service account name. If this field is set, the transfer config will - // be created with this service account's credentials. It requires that the - // requesting user calling this API has permissions to act as this service - // account. - string service_account_name = 6; -} - -// A request to update a transfer configuration. To update the user id of the -// transfer configuration, an authorization code needs to be provided. -message UpdateTransferConfigRequest { - // Required. Data transfer configuration to update. - TransferConfig transfer_config = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional OAuth2 authorization code to use with this transfer configuration. - // If it is provided, the transfer configuration will be associated with the - // authorizing user. - // In order to obtain authorization_code, please make a - // request to - // https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri= - // - // * client_id should be OAuth client_id of BigQuery DTS API for the given - // data source returned by ListDataSources method. - // * data_source_scopes are the scopes returned by ListDataSources method. - // * redirect_uri is an optional parameter. If not specified, then - // authorization code is posted to the opener of authorization flow window.
- // Otherwise it will be sent to the redirect uri. A special value of - // urn:ietf:wg:oauth:2.0:oob means that authorization code should be - // returned in the title bar of the browser, with the page text prompting - // the user to copy the code and paste it in the application. - string authorization_code = 3; - - // Required. The list of fields to be updated in this request. - google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; - - // Optional version info. If users want to find a very recent access token, - // that is, immediately after approving access, users have to set the - // version_info claim in the token request. To obtain the version_info, users - // must use the "none+gsession" response type, which returns a - // version_info in the authorization response. That version_info should then - // be put in a JWT claim in the token request. - string version_info = 5; - - // Optional service account name. If this field is set and - // "service_account_name" is set in update_mask, the transfer config will be - // updated to use this service account's credentials. It requires that the - // requesting user calling this API has permissions to act as this service - // account. - string service_account_name = 6; -} - -// A request to get data transfer information. -message GetTransferConfigRequest { - // Required. The field will contain name of the resource requested, for example: - // `projects/{project_id}/transferConfigs/{config_id}` or - // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerydatatransfer.googleapis.com/TransferConfig" - } - ]; -} - -// A request to delete data transfer information. All associated transfer runs -// and log messages will be deleted as well. -message DeleteTransferConfigRequest { - // Required. The field will contain name of the resource requested, for example: - // `projects/{project_id}/transferConfigs/{config_id}` or - // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerydatatransfer.googleapis.com/TransferConfig" - } - ]; -} - -// A request to get data transfer run information. -message GetTransferRunRequest { - // Required. The field will contain name of the resource requested, for example: - // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or - // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerydatatransfer.googleapis.com/Run" - } - ]; -} - -// A request to delete data transfer run information. -message DeleteTransferRunRequest { - // Required. The field will contain name of the resource requested, for example: - // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or - // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerydatatransfer.googleapis.com/Run" - } - ]; -} - -// A request to list data transfers configured for a BigQuery project. -message ListTransferConfigsRequest { - // Required.
The BigQuery project id for which data sources - // should be returned: `projects/{project_id}` or - // `projects/{project_id}/locations/{location_id}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "bigquerydatatransfer.googleapis.com/TransferConfig" - } - ]; - - // When specified, only configurations of requested data sources are returned. - repeated string data_source_ids = 2; - - // Pagination token, which can be used to request a specific page - // of `ListTransfersRequest` list results. For multiple-page - // results, `ListTransfersResponse` outputs - // a `next_page` token, which can be used as the - // `page_token` value to request the next page of list results. - string page_token = 3; - - // Page size. The default page size is the maximum value of 1000 results. - int32 page_size = 4; -} - -// The returned list of pipelines in the project. -message ListTransferConfigsResponse { - // Output only. The stored pipeline transfer configurations. - repeated TransferConfig transfer_configs = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The next-pagination token. For multiple-page list results, - // this token can be used as the - // `ListTransferConfigsRequest.page_token` - // to request the next page of list results. - string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A request to list data transfer runs. UI can use this method to show/filter -// specific data transfer runs. The data source can use this method to request -// all scheduled transfer runs. -message ListTransferRunsRequest { - // Represents which runs should be pulled. - enum RunAttempt { - // All runs should be returned. - RUN_ATTEMPT_UNSPECIFIED = 0; - - // Only latest run per day should be returned. - LATEST = 1; - } - - // Required. Name of transfer configuration for which transfer runs should be retrieved. - // Format of transfer configuration resource name is: - // `projects/{project_id}/transferConfigs/{config_id}` or - // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "bigquerydatatransfer.googleapis.com/Run" - } - ]; - - // When specified, only transfer runs with requested states are returned. - repeated TransferState states = 2; - - // Pagination token, which can be used to request a specific page - // of `ListTransferRunsRequest` list results. For multiple-page - // results, `ListTransferRunsResponse` outputs - // a `next_page` token, which can be used as the - // `page_token` value to request the next page of list results. - string page_token = 3; - - // Page size. The default page size is the maximum value of 1000 results. - int32 page_size = 4; - - // Indicates how run attempts are to be pulled. - RunAttempt run_attempt = 5; -} - -// The returned list of pipelines in the project. -message ListTransferRunsResponse { - // Output only. The stored pipeline transfer runs. - repeated TransferRun transfer_runs = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The next-pagination token. For multiple-page list results, - // this token can be used as the - // `ListTransferRunsRequest.page_token` - // to request the next page of list results. - string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A request to get user facing log messages associated with data transfer run. 
-message ListTransferLogsRequest { - // Required. Transfer run name in the form: - // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or - // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerydatatransfer.googleapis.com/Run" - } - ]; - - // Pagination token, which can be used to request a specific page - // of `ListTransferLogsRequest` list results. For multiple-page - // results, `ListTransferLogsResponse` outputs - // a `next_page` token, which can be used as the - // `page_token` value to request the next page of list results. - string page_token = 4; - - // Page size. The default page size is the maximum value of 1000 results. - int32 page_size = 5; - - // Message types to return. If not populated - INFO, WARNING and ERROR - // messages are returned. - repeated TransferMessage.MessageSeverity message_types = 6; -} - -// The returned list transfer run messages. -message ListTransferLogsResponse { - // Output only. The stored pipeline transfer messages. - repeated TransferMessage transfer_messages = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The next-pagination token. For multiple-page list results, - // this token can be used as the - // `GetTransferRunLogRequest.page_token` - // to request the next page of list results. - string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A request to determine whether the user has valid credentials. This method -// is used to limit the number of OAuth popups in the user interface. The -// user id is inferred from the API call context. -// If the data source has the Google+ authorization type, this method -// returns false, as it cannot be determined whether the credentials are -// already valid merely based on the user id. -message CheckValidCredsRequest { - // Required. The data source in the form: - // `projects/{project_id}/dataSources/{data_source_id}` or - // `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerydatatransfer.googleapis.com/DataSource" - } - ]; -} - -// A response indicating whether the credentials exist and are valid. -message CheckValidCredsResponse { - // If set to `true`, the credentials exist and are valid. - bool has_valid_creds = 1; -} - -// A request to schedule transfer runs for a time range. -message ScheduleTransferRunsRequest { - // Required. Transfer configuration name in the form: - // `projects/{project_id}/transferConfigs/{config_id}` or - // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerydatatransfer.googleapis.com/TransferConfig" - } - ]; - - // Required. Start time of the range of transfer runs. For example, - // `"2017-05-25T00:00:00+00:00"`. - google.protobuf.Timestamp start_time = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. End time of the range of transfer runs. For example, - // `"2017-05-30T00:00:00+00:00"`. - google.protobuf.Timestamp end_time = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// A response to schedule transfer runs for a time range. -message ScheduleTransferRunsResponse { - // The transfer runs that were scheduled. 
- repeated TransferRun runs = 1; -} - -// A request to start manual transfer runs. -message StartManualTransferRunsRequest { - // A specification for a time range; this will request transfer runs with - // run_time between start_time (inclusive) and end_time (exclusive). - message TimeRange { - // Start time of the range of transfer runs. For example, - // `"2017-05-25T00:00:00+00:00"`. The start_time must be strictly less than - // the end_time. Creates transfer runs where run_time is in the range between - // start_time (inclusive) and end_time (exclusive). - google.protobuf.Timestamp start_time = 1; - - // End time of the range of transfer runs. For example, - // `"2017-05-30T00:00:00+00:00"`. The end_time must not be in the future. - // Creates transfer runs where run_time is in the range between start_time - // (inclusive) and end_time (exclusive). - google.protobuf.Timestamp end_time = 2; - } - - // Transfer configuration name in the form: - // `projects/{project_id}/transferConfigs/{config_id}` or - // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - string parent = 1 [(google.api.resource_reference) = { - type: "bigquerydatatransfer.googleapis.com/TransferConfig" - }]; - - // The requested time specification - this can be a time range or a specific - // run_time. - oneof time { - // Time range for the transfer runs that should be started. - TimeRange requested_time_range = 3; - - // Specific run_time for a transfer run to be started. The - // requested_run_time must not be in the future. - google.protobuf.Timestamp requested_run_time = 4; - } -} - -// A response to start manual transfer runs. -message StartManualTransferRunsResponse { - // The transfer runs that were created. - repeated TransferRun runs = 1; -} diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py deleted file mode 100644 index a5f75c5d4a2e..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py +++ /dev/null @@ -1,3353 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT!
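# [Editorial aside -- the sketch below is not part of the deleted generated
# module.] The DataTransferService RPCs documented in the .proto above were
# reached from Python through the GAPIC client that this patch removes along
# with the protos. A minimal usage sketch, assuming the pre-split
# google-cloud-bigquery-datatransfer surface (DataTransferServiceClient,
# project_path, list_data_sources, check_valid_creds; exact signatures may
# differ) and placeholder resource ids:
from google.cloud import bigquery_datatransfer_v1

client = bigquery_datatransfer_v1.DataTransferServiceClient()

# ListDataSources: enumerate the supported data sources and their settings.
# "my-project" is a hypothetical project id, not a value taken from this patch.
parent = client.project_path("my-project")
for data_source in client.list_data_sources(parent):
    print(data_source.data_source_id, data_source.display_name)

# CheckValidCreds: ask whether valid OAuth credentials already exist for the
# calling user and a given data source before creating a transfer config.
# The resource name format follows the GetDataSourceRequest comment above.
name = "projects/my-project/dataSources/scheduled_query"
response = client.check_valid_creds(name)
print("has_valid_creds:", response.has_valid_creds)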
-# source: google/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.bigquery_datatransfer_v1.proto import ( - transfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto", - package="google.cloud.bigquery.datatransfer.v1", - syntax="proto3", - serialized_options=_b( - "\n)com.google.cloud.bigquery.datatransfer.v1B\021DataTransferProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1" - ), - serialized_pb=_b( - '\n>google/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto"\x85\x05\n\x13\x44\x61taSourceParameter\x12\x10\n\x08param_id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12M\n\x04type\x18\x04 \x01(\x0e\x32?.google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type\x12\x10\n\x08required\x18\x05 \x01(\x08\x12\x10\n\x08repeated\x18\x06 \x01(\x08\x12\x18\n\x10validation_regex\x18\x07 \x01(\t\x12\x16\n\x0e\x61llowed_values\x18\x08 \x03(\t\x12/\n\tmin_value\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tmax_value\x18\n \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12J\n\x06\x66ields\x18\x0b \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x1e\n\x16validation_description\x18\x0c \x01(\t\x12\x1b\n\x13validation_help_url\x18\r \x01(\t\x12\x11\n\timmutable\x18\x0e \x01(\x08\x12\x0f\n\x07recurse\x18\x0f \x01(\x08\x12\x12\n\ndeprecated\x18\x14 \x01(\x08"i\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\x0b\n\x07INTEGER\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x12\n\n\x06RECORD\x10\x05\x12\r\n\tPLUS_PAGE\x10\x06"\x84\t\n\nDataSource\x12\x11\n\x04name\x18\x01 
\x01(\tB\x03\xe0\x41\x03\x12\x16\n\x0e\x64\x61ta_source_id\x18\x02 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x11\n\tclient_id\x18\x05 \x01(\t\x12\x0e\n\x06scopes\x18\x06 \x03(\t\x12N\n\rtransfer_type\x18\x07 \x01(\x0e\x32\x33.google.cloud.bigquery.datatransfer.v1.TransferTypeB\x02\x18\x01\x12\'\n\x1bsupports_multiple_transfers\x18\x08 \x01(\x08\x42\x02\x18\x01\x12\x1f\n\x17update_deadline_seconds\x18\t \x01(\x05\x12\x18\n\x10\x64\x65\x66\x61ult_schedule\x18\n \x01(\t\x12 \n\x18supports_custom_schedule\x18\x0b \x01(\x08\x12N\n\nparameters\x18\x0c \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x10\n\x08help_url\x18\r \x01(\t\x12_\n\x12\x61uthorization_type\x18\x0e \x01(\x0e\x32\x43.google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType\x12\\\n\x11\x64\x61ta_refresh_type\x18\x0f \x01(\x0e\x32\x41.google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType\x12(\n default_data_refresh_window_days\x18\x10 \x01(\x05\x12\x1c\n\x14manual_runs_disabled\x18\x11 \x01(\x08\x12<\n\x19minimum_schedule_interval\x18\x12 \x01(\x0b\x32\x19.google.protobuf.Duration"s\n\x11\x41uthorizationType\x12"\n\x1e\x41UTHORIZATION_TYPE_UNSPECIFIED\x10\x00\x12\x16\n\x12\x41UTHORIZATION_CODE\x10\x01\x12"\n\x1eGOOGLE_PLUS_AUTHORIZATION_CODE\x10\x02"c\n\x0f\x44\x61taRefreshType\x12!\n\x1d\x44\x41TA_REFRESH_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eSLIDING_WINDOW\x10\x01\x12\x19\n\x15\x43USTOM_SLIDING_WINDOW\x10\x02:\xa5\x01\xea\x41\xa1\x01\n.bigquerydatatransfer.googleapis.com/DataSource\x12,projects/{project}/dataSources/{data_source}\x12\x41projects/{project}/locations/{location}/dataSources/{data_source}"\\\n\x14GetDataSourceRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource"\x87\x01\n\x16ListDataSourcesRequest\x12\x46\n\x06parent\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\x12.bigquerydatatransfer.googleapis.com/DataSource\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x80\x01\n\x17ListDataSourcesResponse\x12G\n\x0c\x64\x61ta_sources\x18\x01 \x03(\x0b\x32\x31.google.cloud.bigquery.datatransfer.v1.DataSource\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\x8e\x02\n\x1b\x43reateTransferConfigRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\x12\x32\x62igquerydatatransfer.googleapis.com/TransferConfig\x12S\n\x0ftransfer_config\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12\x14\n\x0cversion_info\x18\x05 \x01(\t\x12\x1c\n\x14service_account_name\x18\x06 \x01(\t"\xf8\x01\n\x1bUpdateTransferConfigRequest\x12S\n\x0ftransfer_config\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x14\n\x0cversion_info\x18\x05 \x01(\t\x12\x1c\n\x14service_account_name\x18\x06 \x01(\t"d\n\x18GetTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig"g\n\x1b\x44\x65leteTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig"V\n\x15GetTransferRunRequest\x12=\n\x04name\x18\x01 
\x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run"Y\n\x18\x44\x65leteTransferRunRequest\x12=\n\x04name\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run"\xa8\x01\n\x1aListTransferConfigsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\x12\x32\x62igquerydatatransfer.googleapis.com/TransferConfig\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 \x03(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x91\x01\n\x1bListTransferConfigsResponse\x12T\n\x10transfer_configs\x18\x01 \x03(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xde\x02\n\x17ListTransferRunsRequest\x12?\n\x06parent\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\x12\'bigquerydatatransfer.googleapis.com/Run\x12\x44\n\x06states\x18\x02 \x03(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12^\n\x0brun_attempt\x18\x05 \x01(\x0e\x32I.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt"5\n\nRunAttempt\x12\x1b\n\x17RUN_ATTEMPT_UNSPECIFIED\x10\x00\x12\n\n\x06LATEST\x10\x01"\x88\x01\n\x18ListTransferRunsResponse\x12N\n\rtransfer_runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRunB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xe0\x01\n\x17ListTransferLogsRequest\x12?\n\x06parent\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12]\n\rmessage_types\x18\x06 \x03(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity"\x90\x01\n\x18ListTransferLogsResponse\x12V\n\x11transfer_messages\x18\x01 \x03(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.TransferMessageB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"^\n\x16\x43heckValidCredsRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource"2\n\x17\x43heckValidCredsResponse\x12\x17\n\x0fhas_valid_creds\x18\x01 \x01(\x08"\xd1\x01\n\x1bScheduleTransferRunsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x33\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12\x31\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02"`\n\x1cScheduleTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"\x87\x03\n\x1eStartManualTransferRunsRequest\x12G\n\x06parent\x18\x01 \x01(\tB7\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12o\n\x14requested_time_range\x18\x03 \x01(\x0b\x32O.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRangeH\x00\x12\x38\n\x12requested_run_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x1ai\n\tTimeRange\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x06\n\x04time"c\n\x1fStartManualTransferRunsResponse\x12@\n\x04runs\x18\x01 
\x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun2\xc3\x1f\n\x13\x44\x61taTransferService\x12\xe6\x01\n\rGetDataSource\x12;.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest\x1a\x31.google.cloud.bigquery.datatransfer.v1.DataSource"e\x82\xd3\xe4\x93\x02X\x12//v1/{name=projects/*/locations/*/dataSources/*}Z%\x12#/v1/{name=projects/*/dataSources/*}\xda\x41\x04name\x12\xf9\x01\n\x0fListDataSources\x12=.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest\x1a>.google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse"g\x82\xd3\xe4\x93\x02X\x12//v1/{parent=projects/*/locations/*}/dataSourcesZ%\x12#/v1/{parent=projects/*}/dataSources\xda\x41\x06parent\x12\xb6\x02\n\x14\x43reateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xa2\x01\x82\xd3\xe4\x93\x02\x82\x01"3/v1/{parent=projects/*/locations/*}/transferConfigs:\x0ftransfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\x0ftransfer_config\xda\x41\x16parent,transfer_config\x12\xdb\x02\n\x14UpdateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xc7\x01\x82\xd3\xe4\x93\x02\xa2\x01\x32\x43/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\x0ftransfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\x0ftransfer_config\xda\x41\x1btransfer_config,update_mask\x12\xe1\x01\n\x14\x44\x65leteTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest\x1a\x16.google.protobuf.Empty"m\x82\xd3\xe4\x93\x02`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*\'/v1/{name=projects/*/transferConfigs/*}\xda\x41\x04name\x12\xfa\x01\n\x11GetTransferConfig\x12?.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"m\x82\xd3\xe4\x93\x02`\x12\x33/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\x12\'/v1/{name=projects/*/transferConfigs/*}\xda\x41\x04name\x12\x8d\x02\n\x13ListTransferConfigs\x12\x41.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest\x1a\x42.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse"o\x82\xd3\xe4\x93\x02`\x12\x33/v1/{parent=projects/*/locations/*}/transferConfigsZ)\x12\'/v1/{parent=projects/*}/transferConfigs\xda\x41\x06parent\x12\xcd\x02\n\x14ScheduleTransferRuns\x12\x42.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest\x1a\x43.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse"\xab\x01\x88\x02\x01\x82\xd3\xe4\x93\x02\x84\x01"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\x01*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\x01*\xda\x41\x1aparent,start_time,end_time\x12\xbc\x02\n\x17StartManualTransferRuns\x12\x45.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest\x1a\x46.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse"\x91\x01\x82\xd3\xe4\x93\x02\x8a\x01"E/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns:\x01*Z>"9/v1/{parent=projects/*/transferConfigs/*}:startManualRuns:\x01*\x12\xff\x01\n\x0eGetTransferRun\x12<.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest\x1a\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"{\x82\xd3\xe4\x93\x02n\x12:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\x12./v1/{name=projects/*/transferConfigs/*/runs/*}\xda\x41\x04name\x12\xe9\x01\n\x11\x44\x65leteTran
sferRun\x12?.google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest\x1a\x16.google.protobuf.Empty"{\x82\xd3\xe4\x93\x02n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\xda\x41\x04name\x12\x92\x02\n\x10ListTransferRuns\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse"}\x82\xd3\xe4\x93\x02n\x12:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\x12./v1/{parent=projects/*/transferConfigs/*}/runs\xda\x41\x06parent\x12\xb2\x02\n\x10ListTransferLogs\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse"\x9c\x01\x82\xd3\xe4\x93\x02\x8c\x01\x12I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\x12=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\xda\x41\x06parent\x12\x9e\x02\n\x0f\x43heckValidCreds\x12=.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest\x1a>.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse"\x8b\x01\x82\xd3\xe4\x93\x02~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\x01*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\x01*\xda\x41\x04name\x1aW\xca\x41#bigquerydatatransfer.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xe3\x01\n)com.google.cloud.bigquery.datatransfer.v1B\x11\x44\x61taTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, - ], -) - - -_DATASOURCEPARAMETER_TYPE = _descriptor.EnumDescriptor( - name="Type", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="STRING", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="INTEGER", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DOUBLE", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="BOOLEAN", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RECORD", index=5, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PLUS_PAGE", index=6, number=6, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=981, - serialized_end=1086, -) -_sym_db.RegisterEnumDescriptor(_DATASOURCEPARAMETER_TYPE) - -_DATASOURCE_AUTHORIZATIONTYPE = _descriptor.EnumDescriptor( - name="AuthorizationType", - 
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="AUTHORIZATION_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="AUTHORIZATION_CODE", - index=1, - number=1, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="GOOGLE_PLUS_AUTHORIZATION_CODE", - index=2, - number=2, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1861, - serialized_end=1976, -) -_sym_db.RegisterEnumDescriptor(_DATASOURCE_AUTHORIZATIONTYPE) - -_DATASOURCE_DATAREFRESHTYPE = _descriptor.EnumDescriptor( - name="DataRefreshType", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="DATA_REFRESH_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="SLIDING_WINDOW", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CUSTOM_SLIDING_WINDOW", - index=2, - number=2, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1978, - serialized_end=2077, -) -_sym_db.RegisterEnumDescriptor(_DATASOURCE_DATAREFRESHTYPE) - -_LISTTRANSFERRUNSREQUEST_RUNATTEMPT = _descriptor.EnumDescriptor( - name="RunAttempt", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="RUN_ATTEMPT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="LATEST", index=1, number=1, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4137, - serialized_end=4190, -) -_sym_db.RegisterEnumDescriptor(_LISTTRANSFERRUNSREQUEST_RUNATTEMPT) - - -_DATASOURCEPARAMETER = _descriptor.Descriptor( - name="DataSourceParameter", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="param_id", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.param_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.description", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="type", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.type", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="required", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.required", - index=4, - number=5, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="repeated", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.repeated", - index=5, - number=6, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="validation_regex", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.validation_regex", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="allowed_values", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.allowed_values", - index=7, - number=8, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="min_value", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.min_value", - index=8, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max_value", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.max_value", - index=9, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fields", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.fields", - index=10, - number=11, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="validation_description", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.validation_description", - index=11, - number=12, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="validation_help_url", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.validation_help_url", - index=12, - number=13, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="immutable", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.immutable", - index=13, - number=14, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="recurse", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.recurse", - index=14, - number=15, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="deprecated", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.deprecated", - index=15, - number=20, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_DATASOURCEPARAMETER_TYPE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=441, - serialized_end=1086, -) - - -_DATASOURCE = _descriptor.Descriptor( - name="DataSource", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="data_source_id", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.data_source_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.display_name", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, 
- ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.description", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="client_id", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.client_id", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="scopes", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.scopes", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="transfer_type", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.transfer_type", - index=6, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="supports_multiple_transfers", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.supports_multiple_transfers", - index=7, - number=8, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_deadline_seconds", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.update_deadline_seconds", - index=8, - number=9, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="default_schedule", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.default_schedule", - index=9, - number=10, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="supports_custom_schedule", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.supports_custom_schedule", - index=10, - number=11, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="parameters", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.parameters", - index=11, - number=12, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - 
default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="help_url", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.help_url", - index=12, - number=13, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="authorization_type", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.authorization_type", - index=13, - number=14, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="data_refresh_type", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.data_refresh_type", - index=14, - number=15, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="default_data_refresh_window_days", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.default_data_refresh_window_days", - index=15, - number=16, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="manual_runs_disabled", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.manual_runs_disabled", - index=16, - number=17, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="minimum_schedule_interval", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.minimum_schedule_interval", - index=17, - number=18, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_DATASOURCE_AUTHORIZATIONTYPE, _DATASOURCE_DATAREFRESHTYPE], - serialized_options=_b( - "\352A\241\001\n.bigquerydatatransfer.googleapis.com/DataSource\022,projects/{project}/dataSources/{data_source}\022Aprojects/{project}/locations/{location}/dataSources/{data_source}" - ), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1089, - serialized_end=2245, -) - - -_GETDATASOURCEREQUEST = _descriptor.Descriptor( - name="GetDataSourceRequest", - full_name="google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - 
label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A0\n.bigquerydatatransfer.googleapis.com/DataSource" - ), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2247, - serialized_end=2339, -) - - -_LISTDATASOURCESREQUEST = _descriptor.Descriptor( - name="ListDataSourcesRequest", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A0\022.bigquerydatatransfer.googleapis.com/DataSource" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest.page_token", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest.page_size", - index=2, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2342, - serialized_end=2477, -) - - -_LISTDATASOURCESRESPONSE = _descriptor.Descriptor( - name="ListDataSourcesResponse", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="data_sources", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse.data_sources", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - 
serialized_start=2480, - serialized_end=2608, -) - - -_CREATETRANSFERCONFIGREQUEST = _descriptor.Descriptor( - name="CreateTransferConfigRequest", - full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A4\0222bigquerydatatransfer.googleapis.com/TransferConfig" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="transfer_config", - full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest.transfer_config", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="authorization_code", - full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest.authorization_code", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version_info", - full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest.version_info", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_account_name", - full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest.service_account_name", - index=4, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2611, - serialized_end=2881, -) - - -_UPDATETRANSFERCONFIGREQUEST = _descriptor.Descriptor( - name="UpdateTransferConfigRequest", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="transfer_config", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest.transfer_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="authorization_code", - 
full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest.authorization_code", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest.update_mask", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version_info", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest.version_info", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_account_name", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest.service_account_name", - index=4, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2884, - serialized_end=3132, -) - - -_GETTRANSFERCONFIGREQUEST = _descriptor.Descriptor( - name="GetTransferConfigRequest", - full_name="google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" - ), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3134, - serialized_end=3234, -) - - -_DELETETRANSFERCONFIGREQUEST = _descriptor.Descriptor( - name="DeleteTransferConfigRequest", - full_name="google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - 
"\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" - ), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3236, - serialized_end=3339, -) - - -_GETTRANSFERRUNREQUEST = _descriptor.Descriptor( - name="GetTransferRunRequest", - full_name="google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A)\n'bigquerydatatransfer.googleapis.com/Run" - ), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3341, - serialized_end=3427, -) - - -_DELETETRANSFERRUNREQUEST = _descriptor.Descriptor( - name="DeleteTransferRunRequest", - full_name="google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A)\n'bigquerydatatransfer.googleapis.com/Run" - ), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3429, - serialized_end=3518, -) - - -_LISTTRANSFERCONFIGSREQUEST = _descriptor.Descriptor( - name="ListTransferConfigsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A4\0222bigquerydatatransfer.googleapis.com/TransferConfig" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="data_source_ids", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest.data_source_ids", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest.page_size", - index=3, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3521, - serialized_end=3689, -) - - -_LISTTRANSFERCONFIGSRESPONSE = _descriptor.Descriptor( - name="ListTransferConfigsResponse", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="transfer_configs", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse.transfer_configs", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3692, - serialized_end=3837, -) - - -_LISTTRANSFERRUNSREQUEST = _descriptor.Descriptor( - name="ListTransferRunsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A)\022'bigquerydatatransfer.googleapis.com/Run" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="states", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.states", - index=1, - number=2, - type=14, - cpp_type=8, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.page_size", - index=3, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="run_attempt", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.run_attempt", - index=4, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_LISTTRANSFERRUNSREQUEST_RUNATTEMPT], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3840, - serialized_end=4190, -) - - -_LISTTRANSFERRUNSRESPONSE = _descriptor.Descriptor( - name="ListTransferRunsResponse", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="transfer_runs", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse.transfer_runs", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4193, - serialized_end=4329, -) - - -_LISTTRANSFERLOGSREQUEST = _descriptor.Descriptor( - name="ListTransferLogsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A)\n'bigquerydatatransfer.googleapis.com/Run" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest.page_token", - index=1, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest.page_size", - index=2, - number=5, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="message_types", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest.message_types", - index=3, - number=6, - type=14, - cpp_type=8, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4332, - serialized_end=4556, -) - - -_LISTTRANSFERLOGSRESPONSE = _descriptor.Descriptor( - name="ListTransferLogsResponse", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="transfer_messages", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse.transfer_messages", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4559, - serialized_end=4703, -) - - -_CHECKVALIDCREDSREQUEST = _descriptor.Descriptor( - name="CheckValidCredsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A0\n.bigquerydatatransfer.googleapis.com/DataSource" - ), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4705, - serialized_end=4799, -) - - -_CHECKVALIDCREDSRESPONSE = _descriptor.Descriptor( - 
name="CheckValidCredsResponse", - full_name="google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="has_valid_creds", - full_name="google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse.has_valid_creds", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4801, - serialized_end=4851, -) - - -_SCHEDULETRANSFERRUNSREQUEST = _descriptor.Descriptor( - name="ScheduleTransferRunsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest.start_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest.end_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4854, - serialized_end=5063, -) - - -_SCHEDULETRANSFERRUNSRESPONSE = _descriptor.Descriptor( - name="ScheduleTransferRunsResponse", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="runs", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse.runs", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5065, - serialized_end=5161, -) - - -_STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE = _descriptor.Descriptor( 
- name="TimeRange", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange.start_time", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange.end_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5442, - serialized_end=5547, -) - -_STARTMANUALTRANSFERRUNSREQUEST = _descriptor.Descriptor( - name="StartManualTransferRunsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="requested_time_range", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.requested_time_range", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="requested_run_time", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.requested_run_time", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="time", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.time", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=5164, - serialized_end=5555, -) - - -_STARTMANUALTRANSFERRUNSRESPONSE = _descriptor.Descriptor( - name="StartManualTransferRunsResponse", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse", - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="runs", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse.runs", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5557, - serialized_end=5656, -) - -_DATASOURCEPARAMETER.fields_by_name["type"].enum_type = _DATASOURCEPARAMETER_TYPE -_DATASOURCEPARAMETER.fields_by_name[ - "min_value" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_DATASOURCEPARAMETER.fields_by_name[ - "max_value" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_DATASOURCEPARAMETER.fields_by_name["fields"].message_type = _DATASOURCEPARAMETER -_DATASOURCEPARAMETER_TYPE.containing_type = _DATASOURCEPARAMETER -_DATASOURCE.fields_by_name[ - "transfer_type" -].enum_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERTYPE -) -_DATASOURCE.fields_by_name["parameters"].message_type = _DATASOURCEPARAMETER -_DATASOURCE.fields_by_name[ - "authorization_type" -].enum_type = _DATASOURCE_AUTHORIZATIONTYPE -_DATASOURCE.fields_by_name["data_refresh_type"].enum_type = _DATASOURCE_DATAREFRESHTYPE -_DATASOURCE.fields_by_name[ - "minimum_schedule_interval" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_DATASOURCE_AUTHORIZATIONTYPE.containing_type = _DATASOURCE -_DATASOURCE_DATAREFRESHTYPE.containing_type = _DATASOURCE -_LISTDATASOURCESRESPONSE.fields_by_name["data_sources"].message_type = _DATASOURCE -_CREATETRANSFERCONFIGREQUEST.fields_by_name[ - "transfer_config" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG -) -_UPDATETRANSFERCONFIGREQUEST.fields_by_name[ - "transfer_config" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG -) -_UPDATETRANSFERCONFIGREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTTRANSFERCONFIGSRESPONSE.fields_by_name[ - "transfer_configs" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG -) -_LISTTRANSFERRUNSREQUEST.fields_by_name[ - "states" -].enum_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERSTATE -) -_LISTTRANSFERRUNSREQUEST.fields_by_name[ - "run_attempt" -].enum_type = _LISTTRANSFERRUNSREQUEST_RUNATTEMPT -_LISTTRANSFERRUNSREQUEST_RUNATTEMPT.containing_type = _LISTTRANSFERRUNSREQUEST -_LISTTRANSFERRUNSRESPONSE.fields_by_name[ - "transfer_runs" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN -) -_LISTTRANSFERLOGSREQUEST.fields_by_name[ - "message_types" -].enum_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERMESSAGE_MESSAGESEVERITY -) -_LISTTRANSFERLOGSRESPONSE.fields_by_name[ - "transfer_messages" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERMESSAGE -) -_SCHEDULETRANSFERRUNSREQUEST.fields_by_name[ - "start_time" 
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_SCHEDULETRANSFERRUNSREQUEST.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_SCHEDULETRANSFERRUNSRESPONSE.fields_by_name[ - "runs" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN -) -_STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE.containing_type = ( - _STARTMANUALTRANSFERRUNSREQUEST -) -_STARTMANUALTRANSFERRUNSREQUEST.fields_by_name[ - "requested_time_range" -].message_type = _STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE -_STARTMANUALTRANSFERRUNSREQUEST.fields_by_name[ - "requested_run_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_STARTMANUALTRANSFERRUNSREQUEST.oneofs_by_name["time"].fields.append( - _STARTMANUALTRANSFERRUNSREQUEST.fields_by_name["requested_time_range"] -) -_STARTMANUALTRANSFERRUNSREQUEST.fields_by_name[ - "requested_time_range" -].containing_oneof = _STARTMANUALTRANSFERRUNSREQUEST.oneofs_by_name["time"] -_STARTMANUALTRANSFERRUNSREQUEST.oneofs_by_name["time"].fields.append( - _STARTMANUALTRANSFERRUNSREQUEST.fields_by_name["requested_run_time"] -) -_STARTMANUALTRANSFERRUNSREQUEST.fields_by_name[ - "requested_run_time" -].containing_oneof = _STARTMANUALTRANSFERRUNSREQUEST.oneofs_by_name["time"] -_STARTMANUALTRANSFERRUNSRESPONSE.fields_by_name[ - "runs" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN -) -DESCRIPTOR.message_types_by_name["DataSourceParameter"] = _DATASOURCEPARAMETER -DESCRIPTOR.message_types_by_name["DataSource"] = _DATASOURCE -DESCRIPTOR.message_types_by_name["GetDataSourceRequest"] = _GETDATASOURCEREQUEST -DESCRIPTOR.message_types_by_name["ListDataSourcesRequest"] = _LISTDATASOURCESREQUEST -DESCRIPTOR.message_types_by_name["ListDataSourcesResponse"] = _LISTDATASOURCESRESPONSE -DESCRIPTOR.message_types_by_name[ - "CreateTransferConfigRequest" -] = _CREATETRANSFERCONFIGREQUEST -DESCRIPTOR.message_types_by_name[ - "UpdateTransferConfigRequest" -] = _UPDATETRANSFERCONFIGREQUEST -DESCRIPTOR.message_types_by_name["GetTransferConfigRequest"] = _GETTRANSFERCONFIGREQUEST -DESCRIPTOR.message_types_by_name[ - "DeleteTransferConfigRequest" -] = _DELETETRANSFERCONFIGREQUEST -DESCRIPTOR.message_types_by_name["GetTransferRunRequest"] = _GETTRANSFERRUNREQUEST -DESCRIPTOR.message_types_by_name["DeleteTransferRunRequest"] = _DELETETRANSFERRUNREQUEST -DESCRIPTOR.message_types_by_name[ - "ListTransferConfigsRequest" -] = _LISTTRANSFERCONFIGSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListTransferConfigsResponse" -] = _LISTTRANSFERCONFIGSRESPONSE -DESCRIPTOR.message_types_by_name["ListTransferRunsRequest"] = _LISTTRANSFERRUNSREQUEST -DESCRIPTOR.message_types_by_name["ListTransferRunsResponse"] = _LISTTRANSFERRUNSRESPONSE -DESCRIPTOR.message_types_by_name["ListTransferLogsRequest"] = _LISTTRANSFERLOGSREQUEST -DESCRIPTOR.message_types_by_name["ListTransferLogsResponse"] = _LISTTRANSFERLOGSRESPONSE -DESCRIPTOR.message_types_by_name["CheckValidCredsRequest"] = _CHECKVALIDCREDSREQUEST -DESCRIPTOR.message_types_by_name["CheckValidCredsResponse"] = _CHECKVALIDCREDSRESPONSE -DESCRIPTOR.message_types_by_name[ - "ScheduleTransferRunsRequest" 
-] = _SCHEDULETRANSFERRUNSREQUEST -DESCRIPTOR.message_types_by_name[ - "ScheduleTransferRunsResponse" -] = _SCHEDULETRANSFERRUNSRESPONSE -DESCRIPTOR.message_types_by_name[ - "StartManualTransferRunsRequest" -] = _STARTMANUALTRANSFERRUNSREQUEST -DESCRIPTOR.message_types_by_name[ - "StartManualTransferRunsResponse" -] = _STARTMANUALTRANSFERRUNSRESPONSE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -DataSourceParameter = _reflection.GeneratedProtocolMessageType( - "DataSourceParameter", - (_message.Message,), - dict( - DESCRIPTOR=_DATASOURCEPARAMETER, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""Represents a data source parameter with validation rules, - so that parameters can be rendered in the UI. These parameters are given - to us by supported data sources, and include all needed information for - rendering and validation. Thus, whoever uses this api can decide to - generate either generic ui, or custom data source specific forms. - - - Attributes: - param_id: - Parameter identifier. - display_name: - Parameter display name in the user interface. - description: - Parameter description. - type: - Parameter type. - required: - Is parameter required. - repeated: - Deprecated. This field has no effect. - validation_regex: - Regular expression which can be used for parameter validation. - allowed_values: - All possible values for the parameter. - min_value: - For integer and double values specifies minimum allowed value. - max_value: - For integer and double values specifies maxminum allowed - value. - fields: - Deprecated. This field has no effect. - validation_description: - Description of the requirements for this field, in case the - user input does not fulfill the regex pattern or min/max - values. - validation_help_url: - URL to a help document to further explain the naming - requirements. - immutable: - Cannot be changed after initial creation. - recurse: - Deprecated. This field has no effect. - deprecated: - If true, it should not be used in new transfers, and it should - not be visible to users. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DataSourceParameter) - ), -) -_sym_db.RegisterMessage(DataSourceParameter) - -DataSource = _reflection.GeneratedProtocolMessageType( - "DataSource", - (_message.Message,), - dict( - DESCRIPTOR=_DATASOURCE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""Represents data source metadata. Metadata is sufficient to - render UI and request proper OAuth tokens. - - - Attributes: - name: - Output only. Data source resource name. - data_source_id: - Data source id. - display_name: - User friendly data source name. - description: - User friendly data source description string. - client_id: - Data source client id which should be used to receive refresh - token. - scopes: - Api auth scopes for which refresh token needs to be obtained. - These are scopes needed by a data source to prepare data and - ingest them into BigQuery, e.g., - https://www.googleapis.com/auth/bigquery - transfer_type: - Deprecated. This field has no effect. - supports_multiple_transfers: - Deprecated. This field has no effect. - update_deadline_seconds: - The number of seconds to wait for an update from the data - source before the Data Transfer Service marks the transfer as - FAILED. - default_schedule: - Default data transfer schedule. 
Examples of valid schedules - include: ``1st,3rd monday of month 15:30``, ``every wed,fri of - jan,jun 13:15``, and ``first sunday of quarter 00:00``. - supports_custom_schedule: - Specifies whether the data source supports a user defined - schedule, or operates on the default schedule. When set to - ``true``, user can override default schedule. - parameters: - Data source parameters. - help_url: - Url for the help document for this data source. - authorization_type: - Indicates the type of authorization. - data_refresh_type: - Specifies whether the data source supports automatic data - refresh for the past few days, and how it's supported. For - some data sources, data might not be complete until a few days - later, so it's useful to refresh data automatically. - default_data_refresh_window_days: - Default data refresh window on days. Only meaningful when - ``data_refresh_type`` = ``SLIDING_WINDOW``. - manual_runs_disabled: - Disables backfilling and manual run scheduling for the data - source. - minimum_schedule_interval: - The minimum interval for scheduler to schedule runs. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DataSource) - ), -) -_sym_db.RegisterMessage(DataSource) - -GetDataSourceRequest = _reflection.GeneratedProtocolMessageType( - "GetDataSourceRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETDATASOURCEREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to get data source info. - - - Attributes: - name: - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/dataSources/{data_source_id}`` or ``pr - ojects/{project_id}/locations/{location_id}/dataSources/{data_ - source_id}`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest) - ), -) -_sym_db.RegisterMessage(GetDataSourceRequest) - -ListDataSourcesRequest = _reflection.GeneratedProtocolMessageType( - "ListDataSourcesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTDATASOURCESREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""Request to list supported data sources and their data - transfer settings. - - - Attributes: - parent: - Required. The BigQuery project id for which data sources - should be returned. Must be in the form: - ``projects/{project_id}`` or - \`projects/{project\_id}/locations/{location\_id} - page_token: - Pagination token, which can be used to request a specific page - of ``ListDataSourcesRequest`` list results. For multiple-page - results, ``ListDataSourcesResponse`` outputs a ``next_page`` - token, which can be used as the ``page_token`` value to - request the next page of list results. - page_size: - Page size. The default page size is the maximum value of 1000 - results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest) - ), -) -_sym_db.RegisterMessage(ListDataSourcesRequest) - -ListDataSourcesResponse = _reflection.GeneratedProtocolMessageType( - "ListDataSourcesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTDATASOURCESRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""Returns list of supported data sources and their metadata. - - - Attributes: - data_sources: - List of supported data sources and their transfer settings. - next_page_token: - Output only. The next-pagination token. 
For multiple-page list - results, this token can be used as the - ``ListDataSourcesRequest.page_token`` to request the next page - of list results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse) - ), -) -_sym_db.RegisterMessage(ListDataSourcesResponse) - -CreateTransferConfigRequest = _reflection.GeneratedProtocolMessageType( - "CreateTransferConfigRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATETRANSFERCONFIGREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to create a data transfer configuration. If new - credentials are needed for this transfer configuration, an authorization - code must be provided. If an authorization code is provided, the - transfer configuration will be associated with the user id corresponding - to the authorization code. Otherwise, the transfer configuration will be - associated with the calling user. - - - Attributes: - parent: - Required. The BigQuery project id where the transfer - configuration should be created. Must be in the format - projects/{project\_id}/locations/{location\_id} or - projects/{project\_id}. If specified location and location of - the destination bigquery dataset do not match - the request - will fail. - transfer_config: - Required. Data transfer configuration to create. - authorization_code: - Optional OAuth2 authorization code to use with this transfer - configuration. This is required if new credentials are needed, - as indicated by ``CheckValidCreds``. In order to obtain - authorization\_code, please make a request to https://www.gsta - tic.com/bigquerydatatransfer/oauthz/auth?client\_id=&scope=&re - direct\_uri= - client\_id should be OAuth client\_id of - BigQuery DTS API for the given data source returned by - ListDataSources method. - data\_source\_scopes are the scopes - returned by ListDataSources method. - redirect\_uri is an - optional parameter. If not specified, then authorization - code is posted to the opener of authorization flow window. - Otherwise it will be sent to the redirect uri. A special - value of urn:ietf:wg:oauth:2.0:oob means that authorization - code should be returned in the title bar of the browser, - with the page text prompting the user to copy the code and - paste it in the application. - version_info: - Optional version info. If users want to find a very recent - access token, that is, immediately after approving access, - users have to set the version\_info claim in the token - request. To obtain the version\_info, users must use the - "none+gsession" response type. which be return a version\_info - back in the authorization response which be be put in a JWT - claim in the token request. - service_account_name: - Optional service account name. If this field is set, transfer - config will be created with this service account credentials. - It requires that requesting user calling this API has - permissions to act as this service account. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest) - ), -) -_sym_db.RegisterMessage(CreateTransferConfigRequest) - -UpdateTransferConfigRequest = _reflection.GeneratedProtocolMessageType( - "UpdateTransferConfigRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATETRANSFERCONFIGREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to update a transfer configuration. 
To update - the user id of the transfer configuration, an authorization code needs - to be provided. - - - Attributes: - transfer_config: - Required. Data transfer configuration to create. - authorization_code: - Optional OAuth2 authorization code to use with this transfer - configuration. If it is provided, the transfer configuration - will be associated with the authorizing user. In order to - obtain authorization\_code, please make a request to https://w - ww.gstatic.com/bigquerydatatransfer/oauthz/auth?client\_id=&sc - ope=&redirect\_uri= - client\_id should be OAuth client\_id - of BigQuery DTS API for the given data source returned by - ListDataSources method. - data\_source\_scopes are the scopes - returned by ListDataSources method. - redirect\_uri is an - optional parameter. If not specified, then authorization - code is posted to the opener of authorization flow window. - Otherwise it will be sent to the redirect uri. A special - value of urn:ietf:wg:oauth:2.0:oob means that authorization - code should be returned in the title bar of the browser, - with the page text prompting the user to copy the code and - paste it in the application. - update_mask: - Required. Required list of fields to be updated in this - request. - version_info: - Optional version info. If users want to find a very recent - access token, that is, immediately after approving access, - users have to set the version\_info claim in the token - request. To obtain the version\_info, users must use the - "none+gsession" response type. which be return a version\_info - back in the authorization response which be be put in a JWT - claim in the token request. - service_account_name: - Optional service account name. If this field is set and - "service\_account\_name" is set in update\_mask, transfer - config will be updated to use this service account - credentials. It requires that requesting user calling this API - has permissions to act as this service account. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest) - ), -) -_sym_db.RegisterMessage(UpdateTransferConfigRequest) - -GetTransferConfigRequest = _reflection.GeneratedProtocolMessageType( - "GetTransferConfigRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETTRANSFERCONFIGREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to get data transfer information. - - - Attributes: - name: - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}`` or ``pro - jects/{project_id}/locations/{location_id}/transferConfigs/{co - nfig_id}`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest) - ), -) -_sym_db.RegisterMessage(GetTransferConfigRequest) - -DeleteTransferConfigRequest = _reflection.GeneratedProtocolMessageType( - "DeleteTransferConfigRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETETRANSFERCONFIGREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to delete data transfer information. All - associated transfer runs and log messages will be deleted as well. - - - Attributes: - name: - Required. 
The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}`` or ``pro - jects/{project_id}/locations/{location_id}/transferConfigs/{co - nfig_id}`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest) - ), -) -_sym_db.RegisterMessage(DeleteTransferConfigRequest) - -GetTransferRunRequest = _reflection.GeneratedProtocolMessageType( - "GetTransferRunRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETTRANSFERRUNREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to get data transfer run information. - - - Attributes: - name: - Required. The field will contain name of the resource - requested, for example: ``projects/{project_id}/transferConfig - s/{config_id}/runs/{run_id}`` or ``projects/{project_id}/locat - ions/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest) - ), -) -_sym_db.RegisterMessage(GetTransferRunRequest) - -DeleteTransferRunRequest = _reflection.GeneratedProtocolMessageType( - "DeleteTransferRunRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETETRANSFERRUNREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to delete data transfer run information. - - - Attributes: - name: - Required. The field will contain name of the resource - requested, for example: ``projects/{project_id}/transferConfig - s/{config_id}/runs/{run_id}`` or ``projects/{project_id}/locat - ions/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest) - ), -) -_sym_db.RegisterMessage(DeleteTransferRunRequest) - -ListTransferConfigsRequest = _reflection.GeneratedProtocolMessageType( - "ListTransferConfigsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTTRANSFERCONFIGSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to list data transfers configured for a BigQuery - project. - - - Attributes: - parent: - Required. The BigQuery project id for which data sources - should be returned: ``projects/{project_id}`` or - ``projects/{project_id}/locations/{location_id}`` - data_source_ids: - When specified, only configurations of requested data sources - are returned. - page_token: - Pagination token, which can be used to request a specific page - of ``ListTransfersRequest`` list results. For multiple-page - results, ``ListTransfersResponse`` outputs a ``next_page`` - token, which can be used as the ``page_token`` value to - request the next page of list results. - page_size: - Page size. The default page size is the maximum value of 1000 - results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest) - ), -) -_sym_db.RegisterMessage(ListTransferConfigsRequest) - -ListTransferConfigsResponse = _reflection.GeneratedProtocolMessageType( - "ListTransferConfigsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTTRANSFERCONFIGSRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""The returned list of pipelines in the project. - - - Attributes: - transfer_configs: - Output only. The stored pipeline transfer configurations. - next_page_token: - Output only. 
The next-pagination token. For multiple-page list - results, this token can be used as the - ``ListTransferConfigsRequest.page_token`` to request the next - page of list results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse) - ), -) -_sym_db.RegisterMessage(ListTransferConfigsResponse) - -ListTransferRunsRequest = _reflection.GeneratedProtocolMessageType( - "ListTransferRunsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTTRANSFERRUNSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to list data transfer runs. UI can use this - method to show/filter specific data transfer runs. The data source can - use this method to request all scheduled transfer runs. - - - Attributes: - parent: - Required. Name of transfer configuration for which transfer - runs should be retrieved. Format of transfer configuration - resource name is: - ``projects/{project_id}/transferConfigs/{config_id}`` or ``pro - jects/{project_id}/locations/{location_id}/transferConfigs/{co - nfig_id}``. - states: - When specified, only transfer runs with requested states are - returned. - page_token: - Pagination token, which can be used to request a specific page - of ``ListTransferRunsRequest`` list results. For multiple-page - results, ``ListTransferRunsResponse`` outputs a ``next_page`` - token, which can be used as the ``page_token`` value to - request the next page of list results. - page_size: - Page size. The default page size is the maximum value of 1000 - results. - run_attempt: - Indicates how run attempts are to be pulled. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest) - ), -) -_sym_db.RegisterMessage(ListTransferRunsRequest) - -ListTransferRunsResponse = _reflection.GeneratedProtocolMessageType( - "ListTransferRunsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTTRANSFERRUNSRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""The returned list of pipelines in the project. - - - Attributes: - transfer_runs: - Output only. The stored pipeline transfer runs. - next_page_token: - Output only. The next-pagination token. For multiple-page list - results, this token can be used as the - ``ListTransferRunsRequest.page_token`` to request the next - page of list results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse) - ), -) -_sym_db.RegisterMessage(ListTransferRunsResponse) - -ListTransferLogsRequest = _reflection.GeneratedProtocolMessageType( - "ListTransferLogsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTTRANSFERLOGSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to get user facing log messages associated with - data transfer run. - - - Attributes: - parent: - Required. Transfer run name in the form: ``projects/{project_i - d}/transferConfigs/{config_id}/runs/{run_id}`` or ``projects/{ - project_id}/locations/{location_id}/transferConfigs/{config_id - }/runs/{run_id}`` - page_token: - Pagination token, which can be used to request a specific page - of ``ListTransferLogsRequest`` list results. For multiple-page - results, ``ListTransferLogsResponse`` outputs a ``next_page`` - token, which can be used as the ``page_token`` value to - request the next page of list results. - page_size: - Page size. 
The default page size is the maximum value of 1000 - results. - message_types: - Message types to return. If not populated - INFO, WARNING and - ERROR messages are returned. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest) - ), -) -_sym_db.RegisterMessage(ListTransferLogsRequest) - -ListTransferLogsResponse = _reflection.GeneratedProtocolMessageType( - "ListTransferLogsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTTRANSFERLOGSRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""The returned list transfer run messages. - - - Attributes: - transfer_messages: - Output only. The stored pipeline transfer messages. - next_page_token: - Output only. The next-pagination token. For multiple-page list - results, this token can be used as the - ``GetTransferRunLogRequest.page_token`` to request the next - page of list results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse) - ), -) -_sym_db.RegisterMessage(ListTransferLogsResponse) - -CheckValidCredsRequest = _reflection.GeneratedProtocolMessageType( - "CheckValidCredsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CHECKVALIDCREDSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to determine whether the user has valid - credentials. This method is used to limit the number of OAuth popups in - the user interface. The user id is inferred from the API call context. - If the data source has the Google+ authorization type, this method - returns false, as it cannot be determined whether the credentials are - already valid merely based on the user id. - - - Attributes: - name: - Required. The data source in the form: - ``projects/{project_id}/dataSources/{data_source_id}`` or ``pr - ojects/{project_id}/locations/{location_id}/dataSources/{data_ - source_id}``. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest) - ), -) -_sym_db.RegisterMessage(CheckValidCredsRequest) - -CheckValidCredsResponse = _reflection.GeneratedProtocolMessageType( - "CheckValidCredsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_CHECKVALIDCREDSRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A response indicating whether the credentials exist and - are valid. - - - Attributes: - has_valid_creds: - If set to ``true``, the credentials exist and are valid. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse) - ), -) -_sym_db.RegisterMessage(CheckValidCredsResponse) - -ScheduleTransferRunsRequest = _reflection.GeneratedProtocolMessageType( - "ScheduleTransferRunsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_SCHEDULETRANSFERRUNSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to schedule transfer runs for a time range. - - - Attributes: - parent: - Required. Transfer configuration name in the form: - ``projects/{project_id}/transferConfigs/{config_id}`` or ``pro - jects/{project_id}/locations/{location_id}/transferConfigs/{co - nfig_id}``. - start_time: - Required. Start time of the range of transfer runs. For - example, ``"2017-05-25T00:00:00+00:00"``. - end_time: - Required. End time of the range of transfer runs. For example, - ``"2017-05-30T00:00:00+00:00"``. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest) - ), -) -_sym_db.RegisterMessage(ScheduleTransferRunsRequest) - -ScheduleTransferRunsResponse = _reflection.GeneratedProtocolMessageType( - "ScheduleTransferRunsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_SCHEDULETRANSFERRUNSRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A response to schedule transfer runs for a time range. - - - Attributes: - runs: - The transfer runs that were scheduled. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse) - ), -) -_sym_db.RegisterMessage(ScheduleTransferRunsResponse) - -StartManualTransferRunsRequest = _reflection.GeneratedProtocolMessageType( - "StartManualTransferRunsRequest", - (_message.Message,), - dict( - TimeRange=_reflection.GeneratedProtocolMessageType( - "TimeRange", - (_message.Message,), - dict( - DESCRIPTOR=_STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A specification for a time range, this will request - transfer runs with run\_time between start\_time (inclusive) and - end\_time (exclusive). - - - Attributes: - start_time: - Start time of the range of transfer runs. For example, - ``"2017-05-25T00:00:00+00:00"``. The start\_time must be - strictly less than the end\_time. Creates transfer runs where - run\_time is in the range betwen start\_time (inclusive) and - end\_time (exlusive). - end_time: - End time of the range of transfer runs. For example, - ``"2017-05-30T00:00:00+00:00"``. The end\_time must not be in - the future. Creates transfer runs where run\_time is in the - range betwen start\_time (inclusive) and end\_time (exlusive). - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange) - ), - ), - DESCRIPTOR=_STARTMANUALTRANSFERRUNSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to start manual transfer runs. - - - Attributes: - parent: - Transfer configuration name in the form: - ``projects/{project_id}/transferConfigs/{config_id}`` or ``pro - jects/{project_id}/locations/{location_id}/transferConfigs/{co - nfig_id}``. - time: - The requested time specification - this can be a time range or - a specific run\_time. - requested_time_range: - Time range for the transfer runs that should be started. - requested_run_time: - Specific run\_time for a transfer run to be started. The - requested\_run\_time must not be in the future. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest) - ), -) -_sym_db.RegisterMessage(StartManualTransferRunsRequest) -_sym_db.RegisterMessage(StartManualTransferRunsRequest.TimeRange) - -StartManualTransferRunsResponse = _reflection.GeneratedProtocolMessageType( - "StartManualTransferRunsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_STARTMANUALTRANSFERRUNSRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A response to start manual transfer runs. - - - Attributes: - runs: - The transfer runs that were created. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse) - ), -) -_sym_db.RegisterMessage(StartManualTransferRunsResponse) - - -DESCRIPTOR._options = None -_DATASOURCE.fields_by_name["name"]._options = None -_DATASOURCE.fields_by_name["transfer_type"]._options = None -_DATASOURCE.fields_by_name["supports_multiple_transfers"]._options = None -_DATASOURCE._options = None -_GETDATASOURCEREQUEST.fields_by_name["name"]._options = None -_LISTDATASOURCESREQUEST.fields_by_name["parent"]._options = None -_LISTDATASOURCESRESPONSE.fields_by_name["next_page_token"]._options = None -_CREATETRANSFERCONFIGREQUEST.fields_by_name["parent"]._options = None -_CREATETRANSFERCONFIGREQUEST.fields_by_name["transfer_config"]._options = None -_UPDATETRANSFERCONFIGREQUEST.fields_by_name["transfer_config"]._options = None -_UPDATETRANSFERCONFIGREQUEST.fields_by_name["update_mask"]._options = None -_GETTRANSFERCONFIGREQUEST.fields_by_name["name"]._options = None -_DELETETRANSFERCONFIGREQUEST.fields_by_name["name"]._options = None -_GETTRANSFERRUNREQUEST.fields_by_name["name"]._options = None -_DELETETRANSFERRUNREQUEST.fields_by_name["name"]._options = None -_LISTTRANSFERCONFIGSREQUEST.fields_by_name["parent"]._options = None -_LISTTRANSFERCONFIGSRESPONSE.fields_by_name["transfer_configs"]._options = None -_LISTTRANSFERCONFIGSRESPONSE.fields_by_name["next_page_token"]._options = None -_LISTTRANSFERRUNSREQUEST.fields_by_name["parent"]._options = None -_LISTTRANSFERRUNSRESPONSE.fields_by_name["transfer_runs"]._options = None -_LISTTRANSFERRUNSRESPONSE.fields_by_name["next_page_token"]._options = None -_LISTTRANSFERLOGSREQUEST.fields_by_name["parent"]._options = None -_LISTTRANSFERLOGSRESPONSE.fields_by_name["transfer_messages"]._options = None -_LISTTRANSFERLOGSRESPONSE.fields_by_name["next_page_token"]._options = None -_CHECKVALIDCREDSREQUEST.fields_by_name["name"]._options = None -_SCHEDULETRANSFERRUNSREQUEST.fields_by_name["parent"]._options = None -_SCHEDULETRANSFERRUNSREQUEST.fields_by_name["start_time"]._options = None -_SCHEDULETRANSFERRUNSREQUEST.fields_by_name["end_time"]._options = None -_STARTMANUALTRANSFERRUNSREQUEST.fields_by_name["parent"]._options = None - -_DATATRANSFERSERVICE = _descriptor.ServiceDescriptor( - name="DataTransferService", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A#bigquerydatatransfer.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" - ), - serialized_start=5659, - serialized_end=9694, - methods=[ - _descriptor.MethodDescriptor( - name="GetDataSource", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.GetDataSource", - index=0, - containing_service=None, - input_type=_GETDATASOURCEREQUEST, - output_type=_DATASOURCE, - serialized_options=_b( - "\202\323\344\223\002X\022//v1/{name=projects/*/locations/*/dataSources/*}Z%\022#/v1/{name=projects/*/dataSources/*}\332A\004name" - ), - ), - _descriptor.MethodDescriptor( - name="ListDataSources", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ListDataSources", - index=1, - containing_service=None, - input_type=_LISTDATASOURCESREQUEST, - output_type=_LISTDATASOURCESRESPONSE, - serialized_options=_b( - "\202\323\344\223\002X\022//v1/{parent=projects/*/locations/*}/dataSourcesZ%\022#/v1/{parent=projects/*}/dataSources\332A\006parent" - ), - ), - _descriptor.MethodDescriptor( - name="CreateTransferConfig", - 
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.CreateTransferConfig", - index=2, - containing_service=None, - input_type=_CREATETRANSFERCONFIGREQUEST, - output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, - serialized_options=_b( - '\202\323\344\223\002\202\001"3/v1/{parent=projects/*/locations/*}/transferConfigs:\017transfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\017transfer_config\332A\026parent,transfer_config' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateTransferConfig", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.UpdateTransferConfig", - index=3, - containing_service=None, - input_type=_UPDATETRANSFERCONFIGREQUEST, - output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, - serialized_options=_b( - "\202\323\344\223\002\242\0012C/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\017transfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\017transfer_config\332A\033transfer_config,update_mask" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteTransferConfig", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferConfig", - index=4, - containing_service=None, - input_type=_DELETETRANSFERCONFIGREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*'/v1/{name=projects/*/transferConfigs/*}\332A\004name" - ), - ), - _descriptor.MethodDescriptor( - name="GetTransferConfig", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.GetTransferConfig", - index=5, - containing_service=None, - input_type=_GETTRANSFERCONFIGREQUEST, - output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, - serialized_options=_b( - "\202\323\344\223\002`\0223/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\022'/v1/{name=projects/*/transferConfigs/*}\332A\004name" - ), - ), - _descriptor.MethodDescriptor( - name="ListTransferConfigs", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferConfigs", - index=6, - containing_service=None, - input_type=_LISTTRANSFERCONFIGSREQUEST, - output_type=_LISTTRANSFERCONFIGSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002`\0223/v1/{parent=projects/*/locations/*}/transferConfigsZ)\022'/v1/{parent=projects/*}/transferConfigs\332A\006parent" - ), - ), - _descriptor.MethodDescriptor( - name="ScheduleTransferRuns", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ScheduleTransferRuns", - index=7, - containing_service=None, - input_type=_SCHEDULETRANSFERRUNSREQUEST, - output_type=_SCHEDULETRANSFERRUNSRESPONSE, - serialized_options=_b( - '\210\002\001\202\323\344\223\002\204\001"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\001*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\001*\332A\032parent,start_time,end_time' - ), - ), - _descriptor.MethodDescriptor( - name="StartManualTransferRuns", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.StartManualTransferRuns", - index=8, - containing_service=None, - input_type=_STARTMANUALTRANSFERRUNSREQUEST, - output_type=_STARTMANUALTRANSFERRUNSRESPONSE, - serialized_options=_b( - 
'\202\323\344\223\002\212\001"E/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns:\001*Z>"9/v1/{parent=projects/*/transferConfigs/*}:startManualRuns:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="GetTransferRun", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.GetTransferRun", - index=9, - containing_service=None, - input_type=_GETTRANSFERRUNREQUEST, - output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN, - serialized_options=_b( - "\202\323\344\223\002n\022:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\022./v1/{name=projects/*/transferConfigs/*/runs/*}\332A\004name" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteTransferRun", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferRun", - index=10, - containing_service=None, - input_type=_DELETETRANSFERRUNREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\332A\004name" - ), - ), - _descriptor.MethodDescriptor( - name="ListTransferRuns", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferRuns", - index=11, - containing_service=None, - input_type=_LISTTRANSFERRUNSREQUEST, - output_type=_LISTTRANSFERRUNSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002n\022:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\022./v1/{parent=projects/*/transferConfigs/*}/runs\332A\006parent" - ), - ), - _descriptor.MethodDescriptor( - name="ListTransferLogs", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferLogs", - index=12, - containing_service=None, - input_type=_LISTTRANSFERLOGSREQUEST, - output_type=_LISTTRANSFERLOGSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002\214\001\022I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\022=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\332A\006parent" - ), - ), - _descriptor.MethodDescriptor( - name="CheckValidCreds", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.CheckValidCreds", - index=13, - containing_service=None, - input_type=_CHECKVALIDCREDSREQUEST, - output_type=_CHECKVALIDCREDSRESPONSE, - serialized_options=_b( - '\202\323\344\223\002~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\001*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\001*\332A\004name' - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_DATATRANSFERSERVICE) - -DESCRIPTOR.services_by_name["DataTransferService"] = _DATATRANSFERSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2_grpc.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2_grpc.py deleted file mode 100644 index ab525e22457c..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2_grpc.py +++ /dev/null @@ -1,296 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-import grpc - -from google.cloud.bigquery_datatransfer_v1.proto import ( - datatransfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2, -) -from google.cloud.bigquery_datatransfer_v1.proto import ( - transfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class DataTransferServiceStub(object): - """The Google BigQuery Data Transfer Service API enables BigQuery users to - configure the transfer of their data from other Google Products into - BigQuery. This service contains methods that are end user exposed. It backs - up the frontend. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.GetDataSource = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetDataSource", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetDataSourceRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.DataSource.FromString, - ) - self.ListDataSources = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListDataSources", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesResponse.FromString, - ) - self.CreateTransferConfig = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CreateTransferConfig", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.CreateTransferConfigRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, - ) - self.UpdateTransferConfig = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/UpdateTransferConfig", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.UpdateTransferConfigRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, - ) - self.DeleteTransferConfig = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferConfigRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.GetTransferConfig = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferConfig", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferConfigRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, - ) - self.ListTransferConfigs = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferConfigs", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsRequest.SerializeToString, - 
response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsResponse.FromString, - ) - self.ScheduleTransferRuns = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ScheduleTransferRuns", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsResponse.FromString, - ) - self.StartManualTransferRuns = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/StartManualTransferRuns", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsResponse.FromString, - ) - self.GetTransferRun = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferRun", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferRunRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferRun.FromString, - ) - self.DeleteTransferRun = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferRunRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ListTransferRuns = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferRuns", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsResponse.FromString, - ) - self.ListTransferLogs = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferLogs", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsResponse.FromString, - ) - self.CheckValidCreds = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CheckValidCreds", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsResponse.FromString, - ) - - -class DataTransferServiceServicer(object): - """The Google BigQuery Data Transfer Service API enables BigQuery users to - configure the transfer of their data from other Google Products into - BigQuery. This service contains methods that are end user exposed. It backs - up the frontend. - """ - - def GetDataSource(self, request, context): - """Retrieves a supported data source and returns its settings, - which can be used for UI rendering. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListDataSources(self, request, context): - """Lists supported data sources and returns their settings, - which can be used for UI rendering. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateTransferConfig(self, request, context): - """Creates a new data transfer configuration. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateTransferConfig(self, request, context): - """Updates a data transfer configuration. - All fields must be set, even if they are not updated. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteTransferConfig(self, request, context): - """Deletes a data transfer configuration, - including any associated transfer runs and logs. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetTransferConfig(self, request, context): - """Returns information about a data transfer config. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTransferConfigs(self, request, context): - """Returns information about all data transfers in the project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ScheduleTransferRuns(self, request, context): - """Creates transfer runs for a time range [start_time, end_time]. - For each date - or whatever granularity the data source supports - in the - range, one transfer run is created. - Note that runs are created per UTC time in the time range. - DEPRECATED: use StartManualTransferRuns instead. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def StartManualTransferRuns(self, request, context): - """Start manual transfer runs to be executed now with schedule_time equal to - current time. The transfer runs can be created for a time range where the - run_time is between start_time (inclusive) and end_time (exclusive), or for - a specific run_time. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetTransferRun(self, request, context): - """Returns information about the particular transfer run. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteTransferRun(self, request, context): - """Deletes the specified transfer run. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTransferRuns(self, request, context): - """Returns information about running and completed jobs. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTransferLogs(self, request, context): - """Returns user facing log messages for the data transfer run. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CheckValidCreds(self, request, context): - """Returns true if valid credentials exist for the given data source and - requesting user. - Some data sources doesn't support service account, so we need to talk to - them on behalf of the end user. This API just checks whether we have OAuth - token for the particular user, which is a pre-requisite before user can - create a transfer config. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_DataTransferServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "GetDataSource": grpc.unary_unary_rpc_method_handler( - servicer.GetDataSource, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetDataSourceRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.DataSource.SerializeToString, - ), - "ListDataSources": grpc.unary_unary_rpc_method_handler( - servicer.ListDataSources, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesResponse.SerializeToString, - ), - "CreateTransferConfig": grpc.unary_unary_rpc_method_handler( - servicer.CreateTransferConfig, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.CreateTransferConfigRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.SerializeToString, - ), - "UpdateTransferConfig": grpc.unary_unary_rpc_method_handler( - servicer.UpdateTransferConfig, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.UpdateTransferConfigRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.SerializeToString, - ), - "DeleteTransferConfig": grpc.unary_unary_rpc_method_handler( - servicer.DeleteTransferConfig, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferConfigRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "GetTransferConfig": grpc.unary_unary_rpc_method_handler( - servicer.GetTransferConfig, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferConfigRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.SerializeToString, - ), - "ListTransferConfigs": grpc.unary_unary_rpc_method_handler( - servicer.ListTransferConfigs, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsRequest.FromString, - 
response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsResponse.SerializeToString, - ), - "ScheduleTransferRuns": grpc.unary_unary_rpc_method_handler( - servicer.ScheduleTransferRuns, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsResponse.SerializeToString, - ), - "StartManualTransferRuns": grpc.unary_unary_rpc_method_handler( - servicer.StartManualTransferRuns, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsResponse.SerializeToString, - ), - "GetTransferRun": grpc.unary_unary_rpc_method_handler( - servicer.GetTransferRun, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferRunRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferRun.SerializeToString, - ), - "DeleteTransferRun": grpc.unary_unary_rpc_method_handler( - servicer.DeleteTransferRun, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferRunRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "ListTransferRuns": grpc.unary_unary_rpc_method_handler( - servicer.ListTransferRuns, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsResponse.SerializeToString, - ), - "ListTransferLogs": grpc.unary_unary_rpc_method_handler( - servicer.ListTransferLogs, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsResponse.SerializeToString, - ), - "CheckValidCreds": grpc.unary_unary_rpc_method_handler( - servicer.CheckValidCreds, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.bigquery.datatransfer.v1.DataTransferService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto deleted file mode 100644 index 6e503b6dea59..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto +++ /dev/null @@ -1,285 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.bigquery.datatransfer.v1; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/protobuf/struct.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/status.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer"; -option java_multiple_files = true; -option java_outer_classname = "TransferProto"; -option java_package = "com.google.cloud.bigquery.datatransfer.v1"; -option objc_class_prefix = "GCBDT"; -option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1"; - -// DEPRECATED. Represents data transfer type. -enum TransferType { - option deprecated = true; - - // Invalid or Unknown transfer type placeholder. - TRANSFER_TYPE_UNSPECIFIED = 0; - - // Batch data transfer. - BATCH = 1; - - // Streaming data transfer. Streaming data source currently doesn't - // support multiple transfer configs per project. - STREAMING = 2; -} - -// Represents data transfer run state. -enum TransferState { - // State placeholder. - TRANSFER_STATE_UNSPECIFIED = 0; - - // Data transfer is scheduled and is waiting to be picked up by - // data transfer backend. - PENDING = 2; - - // Data transfer is in progress. - RUNNING = 3; - - // Data transfer completed successfully. - SUCCEEDED = 4; - - // Data transfer failed. - FAILED = 5; - - // Data transfer is cancelled. - CANCELLED = 6; -} - -// Represents preferences for sending email notifications for transfer run -// events. -message EmailPreferences { - // If true, email notifications will be sent on transfer run failures. - bool enable_failure_email = 1; -} - -// Options customizing the data transfer schedule. -message ScheduleOptions { - // If true, automatic scheduling of data transfer runs for this configuration - // will be disabled. The runs can be started on ad-hoc basis using - // StartManualTransferRuns API. When automatic scheduling is disabled, the - // TransferConfig.schedule field will be ignored. - bool disable_auto_scheduling = 3; - - // Specifies time to start scheduling transfer runs. The first run will be - // scheduled at or after the start time according to a recurrence pattern - // defined in the schedule string. The start time can be changed at any - // moment. The time when a data transfer can be trigerred manually is not - // limited by this option. - google.protobuf.Timestamp start_time = 1; - - // Defines time to stop scheduling transfer runs. A transfer run cannot be - // scheduled at or after the end time. The end time can be changed at any - // moment. The time when a data transfer can be trigerred manually is not - // limited by this option. - google.protobuf.Timestamp end_time = 2; -} - -// Represents a data transfer configuration. A transfer configuration -// contains all metadata needed to perform a data transfer. For example, -// `destination_dataset_id` specifies where data should be stored. 
-// When a new transfer configuration is created, the specified -// `destination_dataset_id` is created when needed and shared with the -// appropriate data source service account. -message TransferConfig { - option (google.api.resource) = { - type: "bigquerydatatransfer.googleapis.com/TransferConfig" - pattern: "projects/{project}/transferConfigs/{transfer_config}" - pattern: "projects/{project}/locations/{location}/transferConfigs/{transfer_config}" - }; - - // The resource name of the transfer config. - // Transfer config names have the form of - // `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`. - // The name is automatically generated based on the config_id specified in - // CreateTransferConfigRequest along with project_id and region. If config_id - // is not provided, usually a uuid, even though it is not guaranteed or - // required, will be generated for config_id. - string name = 1; - - // The desination of the transfer config. - oneof destination { - // The BigQuery target dataset id. - string destination_dataset_id = 2; - } - - // User specified display name for the data transfer. - string display_name = 3; - - // Data source id. Cannot be changed once data transfer is created. - string data_source_id = 5; - - // Data transfer specific parameters. - google.protobuf.Struct params = 9; - - // Data transfer schedule. - // If the data source does not support a custom schedule, this should be - // empty. If it is empty, the default value for the data source will be - // used. - // The specified times are in UTC. - // Examples of valid format: - // `1st,3rd monday of month 15:30`, - // `every wed,fri of jan,jun 13:15`, and - // `first sunday of quarter 00:00`. - // See more explanation about the format here: - // https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format - // NOTE: the granularity should be at least 8 hours, or less frequent. - string schedule = 7; - - // Options customizing the data transfer schedule. - ScheduleOptions schedule_options = 24; - - // The number of days to look back to automatically refresh the data. - // For example, if `data_refresh_window_days = 10`, then every day - // BigQuery reingests data for [today-10, today-1], rather than ingesting data - // for just [today-1]. - // Only valid if the data source supports the feature. Set the value to 0 - // to use the default value. - int32 data_refresh_window_days = 12; - - // Is this config disabled. When set to true, no runs are scheduled - // for a given transfer. - bool disabled = 13; - - // Output only. Data transfer modification time. Ignored by server on input. - google.protobuf.Timestamp update_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Next time when data transfer will run. - google.protobuf.Timestamp next_run_time = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. State of the most recently updated transfer run. - TransferState state = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Deprecated. Unique ID of the user on whose behalf transfer is done. - int64 user_id = 11; - - // Output only. Region in which BigQuery dataset is located. - string dataset_region = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Pub/Sub topic where notifications will be sent after transfer runs - // associated with this transfer config finish. 
- string notification_pubsub_topic = 15; - - // Email notifications will be sent according to these preferences - // to the email address of the user who owns this transfer config. - EmailPreferences email_preferences = 18; -} - -// Represents a data transfer run. -message TransferRun { - option (google.api.resource) = { - type: "bigquerydatatransfer.googleapis.com/Run" - pattern: "projects/{project}/transferConfigs/{transfer_config}/runs/{run}" - pattern: "projects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}" - }; - - // The resource name of the transfer run. - // Transfer run names have the form - // `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`. - // The name is ignored when creating a transfer run. - string name = 1; - - // Minimum time after which a transfer run can be started. - google.protobuf.Timestamp schedule_time = 3; - - // For batch transfer runs, specifies the date and time of the data should be - // ingested. - google.protobuf.Timestamp run_time = 10; - - // Status of the transfer run. - google.rpc.Status error_status = 21; - - // Output only. Time when transfer run was started. - // Parameter ignored by server for input requests. - google.protobuf.Timestamp start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Time when transfer run ended. - // Parameter ignored by server for input requests. - google.protobuf.Timestamp end_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Last time the data transfer run state was updated. - google.protobuf.Timestamp update_time = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Data transfer specific parameters. - google.protobuf.Struct params = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Data transfer destination. - oneof destination { - // Output only. The BigQuery target dataset id. - string destination_dataset_id = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - } - - // Output only. Data source id. - string data_source_id = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Data transfer run state. Ignored for input requests. - TransferState state = 8; - - // Deprecated. Unique ID of the user on whose behalf transfer is done. - int64 user_id = 11; - - // Output only. Describes the schedule of this transfer run if it was - // created as part of a regular schedule. For batch transfer runs that are - // scheduled manually, this is empty. - // NOTE: the system might choose to delay the schedule depending on the - // current load, so `schedule_time` doesn't always match this. - string schedule = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Pub/Sub topic where a notification will be sent after this - // transfer run finishes - string notification_pubsub_topic = 23 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Email notifications will be sent according to these - // preferences to the email address of the user who owns the transfer config - // this run was derived from. - EmailPreferences email_preferences = 25 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Represents a user facing message for a particular data transfer run. -message TransferMessage { - // Represents data transfer user facing message severity. - enum MessageSeverity { - // No severity specified. - MESSAGE_SEVERITY_UNSPECIFIED = 0; - - // Informational message. - INFO = 1; - - // Warning message. - WARNING = 2; - - // Error message. 
- ERROR = 3; - } - - // Time when message was logged. - google.protobuf.Timestamp message_time = 1; - - // Message severity. - MessageSeverity severity = 2; - - // Message text. - string message_text = 3; -} diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py deleted file mode 100644 index d704bce890b2..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py +++ /dev/null @@ -1,1268 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/bigquery/datatransfer_v1/proto/transfer.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery/datatransfer_v1/proto/transfer.proto", - package="google.cloud.bigquery.datatransfer.v1", - syntax="proto3", - serialized_options=_b( - "\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\242\002\005GCBDT\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1" - ), - serialized_pb=_b( - '\n:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"0\n\x10\x45mailPreferences\x12\x1c\n\x14\x65nable_failure_email\x18\x01 \x01(\x08"\x90\x01\n\x0fScheduleOptions\x12\x1f\n\x17\x64isable_auto_scheduling\x18\x03 \x01(\x08\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xd7\x06\n\x0eTransferConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12 \n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tH\x00\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x05 \x01(\t\x12\'\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x10\n\x08schedule\x18\x07 \x01(\t\x12P\n\x10schedule_options\x18\x18 \x01(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.ScheduleOptions\x12 \n\x18\x64\x61ta_refresh_window_days\x18\x0c \x01(\x05\x12\x10\n\x08\x64isabled\x18\r \x01(\x08\x12\x34\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x36\n\rnext_run_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12H\n\x05state\x18\n \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferStateB\x03\xe0\x41\x03\x12\x0f\n\x07user_id\x18\x0b 
\x01(\x03\x12\x1b\n\x0e\x64\x61taset_region\x18\x0e \x01(\tB\x03\xe0\x41\x03\x12!\n\x19notification_pubsub_topic\x18\x0f \x01(\t\x12R\n\x11\x65mail_preferences\x18\x12 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferences:\xb9\x01\xea\x41\xb5\x01\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x34projects/{project}/transferConfigs/{transfer_config}\x12Iprojects/{project}/locations/{location}/transferConfigs/{transfer_config}B\r\n\x0b\x64\x65stination"\xfa\x06\n\x0bTransferRun\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rschedule_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08run_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\x0c\x65rror_status\x18\x15 \x01(\x0b\x32\x12.google.rpc.Status\x12\x33\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12,\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.StructB\x03\xe0\x41\x03\x12%\n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x03H\x00\x12\x1b\n\x0e\x64\x61ta_source_id\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x05state\x18\x08 \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x15\n\x08schedule\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12&\n\x19notification_pubsub_topic\x18\x17 \x01(\tB\x03\xe0\x41\x03\x12W\n\x11\x65mail_preferences\x18\x19 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferencesB\x03\xe0\x41\x03:\xc4\x01\xea\x41\xc0\x01\n\'bigquerydatatransfer.googleapis.com/Run\x12?projects/{project}/transferConfigs/{transfer_config}/runs/{run}\x12Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}B\r\n\x0b\x64\x65stination"\x8a\x02\n\x0fTransferMessage\x12\x30\n\x0cmessage_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12X\n\x08severity\x18\x02 \x01(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity\x12\x14\n\x0cmessage_text\x18\x03 \x01(\t"U\n\x0fMessageSeverity\x12 \n\x1cMESSAGE_SEVERITY_UNSPECIFIED\x10\x00\x12\x08\n\x04INFO\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03*K\n\x0cTransferType\x12\x1d\n\x19TRANSFER_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41TCH\x10\x01\x12\r\n\tSTREAMING\x10\x02\x1a\x02\x18\x01*s\n\rTransferState\x12\x1e\n\x1aTRANSFER_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05\x12\r\n\tCANCELLED\x10\x06\x42\xe7\x01\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xa2\x02\x05GCBDT\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - ], -) - -_TRANSFERTYPE = _descriptor.EnumDescriptor( - name="TransferType", - full_name="google.cloud.bigquery.datatransfer.v1.TransferType", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="TRANSFER_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, 
- ), - _descriptor.EnumValueDescriptor( - name="BATCH", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="STREAMING", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=_b("\030\001"), - serialized_start=2466, - serialized_end=2541, -) -_sym_db.RegisterEnumDescriptor(_TRANSFERTYPE) - -TransferType = enum_type_wrapper.EnumTypeWrapper(_TRANSFERTYPE) -_TRANSFERSTATE = _descriptor.EnumDescriptor( - name="TransferState", - full_name="google.cloud.bigquery.datatransfer.v1.TransferState", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="TRANSFER_STATE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="PENDING", index=1, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", index=2, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SUCCEEDED", index=3, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FAILED", index=4, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CANCELLED", index=5, number=6, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2543, - serialized_end=2658, -) -_sym_db.RegisterEnumDescriptor(_TRANSFERSTATE) - -TransferState = enum_type_wrapper.EnumTypeWrapper(_TRANSFERSTATE) -TRANSFER_TYPE_UNSPECIFIED = 0 -BATCH = 1 -STREAMING = 2 -TRANSFER_STATE_UNSPECIFIED = 0 -PENDING = 2 -RUNNING = 3 -SUCCEEDED = 4 -FAILED = 5 -CANCELLED = 6 - - -_TRANSFERMESSAGE_MESSAGESEVERITY = _descriptor.EnumDescriptor( - name="MessageSeverity", - full_name="google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="MESSAGE_SEVERITY_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="INFO", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="WARNING", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ERROR", index=3, number=3, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2379, - serialized_end=2464, -) -_sym_db.RegisterEnumDescriptor(_TRANSFERMESSAGE_MESSAGESEVERITY) - - -_EMAILPREFERENCES = _descriptor.Descriptor( - name="EmailPreferences", - full_name="google.cloud.bigquery.datatransfer.v1.EmailPreferences", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="enable_failure_email", - full_name="google.cloud.bigquery.datatransfer.v1.EmailPreferences.enable_failure_email", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=249, - serialized_end=297, -) - - -_SCHEDULEOPTIONS = _descriptor.Descriptor( - name="ScheduleOptions", - 
full_name="google.cloud.bigquery.datatransfer.v1.ScheduleOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="disable_auto_scheduling", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleOptions.disable_auto_scheduling", - index=0, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleOptions.start_time", - index=1, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleOptions.end_time", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=300, - serialized_end=444, -) - - -_TRANSFERCONFIG = _descriptor.Descriptor( - name="TransferConfig", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="destination_dataset_id", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.destination_dataset_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.display_name", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="data_source_id", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.data_source_id", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="params", - 
full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.params", - index=4, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="schedule", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.schedule", - index=5, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="schedule_options", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.schedule_options", - index=6, - number=24, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="data_refresh_window_days", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.data_refresh_window_days", - index=7, - number=12, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="disabled", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.disabled", - index=8, - number=13, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.update_time", - index=9, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_run_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.next_run_time", - index=10, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.state", - index=11, - number=10, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="user_id", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.user_id", - index=12, - number=11, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="dataset_region", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.dataset_region", - index=13, - number=14, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="notification_pubsub_topic", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.notification_pubsub_topic", - index=14, - number=15, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="email_preferences", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.email_preferences", - index=15, - number=18, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b( - "\352A\265\001\n2bigquerydatatransfer.googleapis.com/TransferConfig\0224projects/{project}/transferConfigs/{transfer_config}\022Iprojects/{project}/locations/{location}/transferConfigs/{transfer_config}" - ), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="destination", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.destination", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=447, - serialized_end=1302, -) - - -_TRANSFERRUN = _descriptor.Descriptor( - name="TransferRun", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="schedule_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.schedule_time", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="run_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.run_time", - index=2, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="error_status", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.error_status", - index=3, - number=21, - type=11, - 
cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.start_time", - index=4, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.end_time", - index=5, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.update_time", - index=6, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="params", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.params", - index=7, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="destination_dataset_id", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.destination_dataset_id", - index=8, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="data_source_id", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.data_source_id", - index=9, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.state", - index=10, - number=8, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="user_id", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.user_id", - index=11, - number=11, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - 
name="schedule", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.schedule", - index=12, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="notification_pubsub_topic", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.notification_pubsub_topic", - index=13, - number=23, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="email_preferences", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.email_preferences", - index=14, - number=25, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b( - "\352A\300\001\n'bigquerydatatransfer.googleapis.com/Run\022?projects/{project}/transferConfigs/{transfer_config}/runs/{run}\022Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}" - ), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="destination", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.destination", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=1305, - serialized_end=2195, -) - - -_TRANSFERMESSAGE = _descriptor.Descriptor( - name="TransferMessage", - full_name="google.cloud.bigquery.datatransfer.v1.TransferMessage", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="message_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferMessage.message_time", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="severity", - full_name="google.cloud.bigquery.datatransfer.v1.TransferMessage.severity", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="message_text", - full_name="google.cloud.bigquery.datatransfer.v1.TransferMessage.message_text", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_TRANSFERMESSAGE_MESSAGESEVERITY], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2198, - 
serialized_end=2464, -) - -_SCHEDULEOPTIONS.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_SCHEDULEOPTIONS.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERCONFIG.fields_by_name[ - "params" -].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT -_TRANSFERCONFIG.fields_by_name["schedule_options"].message_type = _SCHEDULEOPTIONS -_TRANSFERCONFIG.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERCONFIG.fields_by_name[ - "next_run_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERCONFIG.fields_by_name["state"].enum_type = _TRANSFERSTATE -_TRANSFERCONFIG.fields_by_name["email_preferences"].message_type = _EMAILPREFERENCES -_TRANSFERCONFIG.oneofs_by_name["destination"].fields.append( - _TRANSFERCONFIG.fields_by_name["destination_dataset_id"] -) -_TRANSFERCONFIG.fields_by_name[ - "destination_dataset_id" -].containing_oneof = _TRANSFERCONFIG.oneofs_by_name["destination"] -_TRANSFERRUN.fields_by_name[ - "schedule_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERRUN.fields_by_name[ - "run_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERRUN.fields_by_name[ - "error_status" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_TRANSFERRUN.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERRUN.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERRUN.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERRUN.fields_by_name[ - "params" -].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT -_TRANSFERRUN.fields_by_name["state"].enum_type = _TRANSFERSTATE -_TRANSFERRUN.fields_by_name["email_preferences"].message_type = _EMAILPREFERENCES -_TRANSFERRUN.oneofs_by_name["destination"].fields.append( - _TRANSFERRUN.fields_by_name["destination_dataset_id"] -) -_TRANSFERRUN.fields_by_name[ - "destination_dataset_id" -].containing_oneof = _TRANSFERRUN.oneofs_by_name["destination"] -_TRANSFERMESSAGE.fields_by_name[ - "message_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERMESSAGE.fields_by_name["severity"].enum_type = _TRANSFERMESSAGE_MESSAGESEVERITY -_TRANSFERMESSAGE_MESSAGESEVERITY.containing_type = _TRANSFERMESSAGE -DESCRIPTOR.message_types_by_name["EmailPreferences"] = _EMAILPREFERENCES -DESCRIPTOR.message_types_by_name["ScheduleOptions"] = _SCHEDULEOPTIONS -DESCRIPTOR.message_types_by_name["TransferConfig"] = _TRANSFERCONFIG -DESCRIPTOR.message_types_by_name["TransferRun"] = _TRANSFERRUN -DESCRIPTOR.message_types_by_name["TransferMessage"] = _TRANSFERMESSAGE -DESCRIPTOR.enum_types_by_name["TransferType"] = _TRANSFERTYPE -DESCRIPTOR.enum_types_by_name["TransferState"] = _TRANSFERSTATE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -EmailPreferences = _reflection.GeneratedProtocolMessageType( - "EmailPreferences", - (_message.Message,), - dict( - DESCRIPTOR=_EMAILPREFERENCES, - __module__="google.cloud.bigquery.datatransfer_v1.proto.transfer_pb2", - __doc__="""Represents preferences for sending email notifications for - transfer run events. - - - Attributes: - enable_failure_email: - If true, email notifications will be sent on transfer run - failures. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.EmailPreferences) - ), -) -_sym_db.RegisterMessage(EmailPreferences) - -ScheduleOptions = _reflection.GeneratedProtocolMessageType( - "ScheduleOptions", - (_message.Message,), - dict( - DESCRIPTOR=_SCHEDULEOPTIONS, - __module__="google.cloud.bigquery.datatransfer_v1.proto.transfer_pb2", - __doc__="""Options customizing the data transfer schedule. - - - Attributes: - disable_auto_scheduling: - If true, automatic scheduling of data transfer runs for this - configuration will be disabled. The runs can be started on ad- - hoc basis using StartManualTransferRuns API. When automatic - scheduling is disabled, the TransferConfig.schedule field will - be ignored. - start_time: - Specifies time to start scheduling transfer runs. The first - run will be scheduled at or after the start time according to - a recurrence pattern defined in the schedule string. The start - time can be changed at any moment. The time when a data - transfer can be trigerred manually is not limited by this - option. - end_time: - Defines time to stop scheduling transfer runs. A transfer run - cannot be scheduled at or after the end time. The end time can - be changed at any moment. The time when a data transfer can be - trigerred manually is not limited by this option. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ScheduleOptions) - ), -) -_sym_db.RegisterMessage(ScheduleOptions) - -TransferConfig = _reflection.GeneratedProtocolMessageType( - "TransferConfig", - (_message.Message,), - dict( - DESCRIPTOR=_TRANSFERCONFIG, - __module__="google.cloud.bigquery.datatransfer_v1.proto.transfer_pb2", - __doc__="""Represents a data transfer configuration. A transfer - configuration contains all metadata needed to perform a data transfer. - For example, ``destination_dataset_id`` specifies where data should be - stored. When a new transfer configuration is created, the specified - ``destination_dataset_id`` is created when needed and shared with the - appropriate data source service account. - - - Attributes: - name: - The resource name of the transfer config. Transfer config - names have the form of ``projects/{project_id}/locations/{regi - on}/transferConfigs/{config_id}``. The name is automatically - generated based on the config\_id specified in - CreateTransferConfigRequest along with project\_id and region. - If config\_id is not provided, usually a uuid, even though it - is not guaranteed or required, will be generated for - config\_id. - destination: - The desination of the transfer config. - destination_dataset_id: - The BigQuery target dataset id. - display_name: - User specified display name for the data transfer. - data_source_id: - Data source id. Cannot be changed once data transfer is - created. - params: - Data transfer specific parameters. - schedule: - Data transfer schedule. If the data source does not support a - custom schedule, this should be empty. If it is empty, the - default value for the data source will be used. The specified - times are in UTC. Examples of valid format: ``1st,3rd monday - of month 15:30``, ``every wed,fri of jan,jun 13:15``, and - ``first sunday of quarter 00:00``. See more explanation about - the format here: https://cloud.google.com/appengine/docs/flexi - ble/python/scheduling-jobs-with-cron- - yaml#the\_schedule\_format NOTE: the granularity should be at - least 8 hours, or less frequent. - schedule_options: - Options customizing the data transfer schedule. 
- data_refresh_window_days:
- The number of days to look back to automatically refresh the
- data. For example, if ``data_refresh_window_days = 10``, then
- every day BigQuery reingests data for [today-10, today-1],
- rather than ingesting data for just [today-1]. Only valid if
- the data source supports the feature. Set the value to 0 to
- use the default value.
- disabled:
- Whether this config is disabled. When set to true, no runs are
- scheduled for a given transfer.
- update_time:
- Output only. Data transfer modification time. Ignored by
- server on input.
- next_run_time:
- Output only. Next time when data transfer will run.
- state:
- Output only. State of the most recently updated transfer run.
- user_id:
- Deprecated. Unique ID of the user on whose behalf transfer is
- done.
- dataset_region:
- Output only. Region in which BigQuery dataset is located.
- notification_pubsub_topic:
- Pub/Sub topic where notifications will be sent after transfer
- runs associated with this transfer config finish.
- email_preferences:
- Email notifications will be sent according to these
- preferences to the email address of the user who owns this
- transfer config.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.TransferConfig)
- ),
-)
-_sym_db.RegisterMessage(TransferConfig)
-
-TransferRun = _reflection.GeneratedProtocolMessageType(
- "TransferRun",
- (_message.Message,),
- dict(
- DESCRIPTOR=_TRANSFERRUN,
- __module__="google.cloud.bigquery.datatransfer_v1.proto.transfer_pb2",
- __doc__="""Represents a data transfer run.
-
-
- Attributes:
- name:
- The resource name of the transfer run. Transfer run names have
- the form ``projects/{project_id}/locations/{location}/transfer
- Configs/{config_id}/runs/{run_id}``. The name is ignored when
- creating a transfer run.
- schedule_time:
- Minimum time after which a transfer run can be started.
- run_time:
- For batch transfer runs, specifies the date and time when the
- data should be ingested.
- error_status:
- Status of the transfer run.
- start_time:
- Output only. Time when transfer run was started. Parameter
- ignored by server for input requests.
- end_time:
- Output only. Time when transfer run ended. Parameter ignored
- by server for input requests.
- update_time:
- Output only. Last time the data transfer run state was
- updated.
- params:
- Output only. Data transfer specific parameters.
- destination:
- Data transfer destination.
- destination_dataset_id:
- Output only. The BigQuery target dataset id.
- data_source_id:
- Output only. Data source id.
- state:
- Data transfer run state. Ignored for input requests.
- user_id:
- Deprecated. Unique ID of the user on whose behalf transfer is
- done.
- schedule:
- Output only. Describes the schedule of this transfer run if it
- was created as part of a regular schedule. For batch transfer
- runs that are scheduled manually, this is empty. NOTE: the
- system might choose to delay the schedule depending on the
- current load, so ``schedule_time`` doesn't always match this.
- notification_pubsub_topic:
- Output only. Pub/Sub topic where a notification will be sent
- after this transfer run finishes.
- email_preferences:
- Output only. Email notifications will be sent according to
- these preferences to the email address of the user who owns
- the transfer config this run was derived from.
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.TransferRun) - ), -) -_sym_db.RegisterMessage(TransferRun) - -TransferMessage = _reflection.GeneratedProtocolMessageType( - "TransferMessage", - (_message.Message,), - dict( - DESCRIPTOR=_TRANSFERMESSAGE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.transfer_pb2", - __doc__="""Represents a user facing message for a particular data - transfer run. - - - Attributes: - message_time: - Time when message was logged. - severity: - Message severity. - message_text: - Message text. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.TransferMessage) - ), -) -_sym_db.RegisterMessage(TransferMessage) - - -DESCRIPTOR._options = None -_TRANSFERTYPE._options = None -_TRANSFERCONFIG.fields_by_name["update_time"]._options = None -_TRANSFERCONFIG.fields_by_name["next_run_time"]._options = None -_TRANSFERCONFIG.fields_by_name["state"]._options = None -_TRANSFERCONFIG.fields_by_name["dataset_region"]._options = None -_TRANSFERCONFIG._options = None -_TRANSFERRUN.fields_by_name["start_time"]._options = None -_TRANSFERRUN.fields_by_name["end_time"]._options = None -_TRANSFERRUN.fields_by_name["update_time"]._options = None -_TRANSFERRUN.fields_by_name["params"]._options = None -_TRANSFERRUN.fields_by_name["destination_dataset_id"]._options = None -_TRANSFERRUN.fields_by_name["data_source_id"]._options = None -_TRANSFERRUN.fields_by_name["schedule"]._options = None -_TRANSFERRUN.fields_by_name["notification_pubsub_topic"]._options = None -_TRANSFERRUN.fields_by_name["email_preferences"]._options = None -_TRANSFERRUN._options = None -# @@protoc_insertion_point(module_scope) diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2_grpc.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/types.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/types.py deleted file mode 100644 index ea0ad1905fa3..000000000000 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/types.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from __future__ import absolute_import -import sys - -from google.api_core.protobuf_helpers import get_messages - -from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2 -from google.cloud.bigquery_datatransfer_v1.proto import transfer_pb2 -from google.protobuf import any_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import struct_pb2 -from google.protobuf import timestamp_pb2 -from google.protobuf import wrappers_pb2 -from google.rpc import status_pb2 - - -_shared_modules = [ - any_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - struct_pb2, - timestamp_pb2, - wrappers_pb2, - status_pb2, -] - -_local_modules = [datatransfer_pb2, transfer_pb2] - -names = [] - -for module in _shared_modules: # pragma: NO COVER - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.bigquery_datatransfer_v1.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - - -__all__ = tuple(sorted(names)) diff --git a/bigquery_datatransfer/noxfile.py b/bigquery_datatransfer/noxfile.py deleted file mode 100644 index d1b842840fdb..000000000000 --- a/bigquery_datatransfer/noxfile.py +++ /dev/null @@ -1,179 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! - -from __future__ import absolute_import -import os -import shutil - -import nox - - -LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) -BLACK_VERSION = "black==19.3b0" -BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -if os.path.exists("samples"): - BLACK_PATHS.append("samples") - - -@nox.session(python="3.7") -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION, *LOCAL_DEPS) - session.run("black", "--check", *BLACK_PATHS) - session.run("flake8", "google", "tests") - - -@nox.session(python="3.6") -def blacken(session): - """Run black. - - Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. - """ - session.install(BLACK_VERSION) - session.run("black", *BLACK_PATHS) - - -@nox.session(python="3.7") -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def default(session): - # Install all test dependencies, then install this package in-place. 
- session.install("mock", "pytest", "pytest-cov") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", ".") - - # Run py.test against the unit tests. - session.run( - "py.test", - "--quiet", - "--cov=google.cloud", - "--cov=tests.unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - ) - - -@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"]) -def unit(session): - """Run the unit test suite.""" - default(session) - - -@nox.session(python=["2.7", "3.7"]) -def system(session): - """Run the system test suite.""" - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install("mock", "pytest") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", "../test_utils/") - session.install("-e", ".") - - # Run py.test against the system tests. - if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) - if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) - - -@nox.session(python=["2.7", "3.7"]) -def samples(session): - requirements_path = os.path.join("samples", "requirements.txt") - requirements_exists = os.path.exists(requirements_path) - - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") - - session.install("mock", "pytest") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - if requirements_exists: - session.install("-r", requirements_path) - session.install("-e", ".") - - session.run("py.test", "--quiet", "samples", *session.posargs) - - -@nox.session(python="3.7") -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=79") - - session.run("coverage", "erase") - - -@nox.session(python="3.7") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/bigquery_datatransfer/samples/__init__.py b/bigquery_datatransfer/samples/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bigquery_datatransfer/samples/create_scheduled_query.py b/bigquery_datatransfer/samples/create_scheduled_query.py deleted file mode 100644 index b4e7437fb37d..000000000000 --- a/bigquery_datatransfer/samples/create_scheduled_query.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# To install the latest published package dependency, execute the following: -# pip install google-cloud-bigquery-datatransfer - - -def sample_create_transfer_config(project_id, dataset_id, authorization_code=""): - # [START bigquerydatatransfer_create_scheduled_query] - from google.cloud import bigquery_datatransfer_v1 - import google.protobuf.json_format - - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # TODO(developer): Set the project_id to the project that contains the - # destination dataset. - # project_id = "your-project-id" - - # TODO(developer): Set the destination dataset. The authorized user must - # have owner permissions on the dataset. - # dataset_id = "your_dataset_id" - - # TODO(developer): The first time you run this sample, set the - # authorization code to a value from the URL: - # https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=433065040935-hav5fqnc9p9cht3rqneus9115ias2kn1.apps.googleusercontent.com&scope=https://www.googleapis.com/auth/bigquery%20https://www.googleapis.com/auth/drive&redirect_uri=urn:ietf:wg:oauth:2.0:oob - # - # authorization_code = "_4/ABCD-EFGHIJKLMNOP-QRSTUVWXYZ" - # - # You can use an empty string for authorization_code in subsequent runs of - # this code sample with the same credentials. - # - # authorization_code = "" - - # Use standard SQL syntax for the query. 
- query_string = """ - SELECT - CURRENT_TIMESTAMP() as current_time, - @run_time as intended_run_time, - @run_date as intended_run_date, - 17 as some_integer - """ - - parent = client.project_path(project_id) - - transfer_config = google.protobuf.json_format.ParseDict( - { - "destination_dataset_id": dataset_id, - "display_name": "Your Scheduled Query Name", - "data_source_id": "scheduled_query", - "params": { - "query": query_string, - "destination_table_name_template": "your_table_{run_date}", - "write_disposition": "WRITE_TRUNCATE", - "partitioning_field": "", - }, - "schedule": "every 24 hours", - }, - bigquery_datatransfer_v1.types.TransferConfig(), - ) - - response = client.create_transfer_config( - parent, transfer_config, authorization_code=authorization_code - ) - - print("Created scheduled query '{}'".format(response.name)) - # [END bigquerydatatransfer_create_scheduled_query] - # Return the config name for testing purposes, so that it can be deleted. - return response.name - - -def main(): - import argparse - - parser = argparse.ArgumentParser() - parser.add_argument("--project_id", type=str, default="your-project-id") - parser.add_argument("--dataset_id", type=str, default="your_dataset_id") - parser.add_argument("--authorization_code", type=str, default="") - args = parser.parse_args() - - sample_create_transfer_config(args.project_id, args.authorization_code) - - -if __name__ == "__main__": - main() diff --git a/bigquery_datatransfer/samples/requirements.txt b/bigquery_datatransfer/samples/requirements.txt deleted file mode 100644 index 07315b55faa6..000000000000 --- a/bigquery_datatransfer/samples/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e ../bigquery diff --git a/bigquery_datatransfer/samples/tests/__init__.py b/bigquery_datatransfer/samples/tests/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bigquery_datatransfer/samples/tests/test_create_scheduled_query.py b/bigquery_datatransfer/samples/tests/test_create_scheduled_query.py deleted file mode 100644 index a1a69c1ff0c3..000000000000 --- a/bigquery_datatransfer/samples/tests/test_create_scheduled_query.py +++ /dev/null @@ -1,82 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time -import os - -import google.api_core.exceptions -import google.auth -import google.cloud.bigquery -import pytest - -from .. import create_scheduled_query - - -@pytest.fixture -def project_id(): - return os.environ["PROJECT_ID"] - - -@pytest.fixture(scope="module") -def credentials(): - # If using a service account, the BQ DTS robot associated with your project - # requires the roles/iam.serviceAccountShortTermTokenMinter permission to - # act on behalf of the account. 
- creds, _ = google.auth.default(["https://www.googleapis.com/auth/cloud-platform"]) - return creds - - -@pytest.fixture(scope="module") -def bqdts_client(credentials): - from google.cloud import bigquery_datatransfer_v1 - - return bigquery_datatransfer_v1.DataTransferServiceClient(credentials=credentials) - - -@pytest.fixture(scope="module") -def bigquery_client(credentials): - return google.cloud.bigquery.Client(credentials=credentials) - - -@pytest.fixture(scope="module") -def dataset_id(bigquery_client): - # Ensure the test account has owner permissions on the dataset by creating - # one from scratch. - temp_ds_id = "bqdts_{}".format(int(time.clock() * 1000000)) - bigquery_client.create_dataset(temp_ds_id) - yield temp_ds_id - bigquery_client.delete_dataset(temp_ds_id) - - -@pytest.fixture -def to_delete(bqdts_client): - doomed = [] - yield doomed - - for resource_name in doomed: - try: - bqdts_client.delete_transfer_config(resource_name) - except google.api_core.exceptions.NotFound: - pass - - -def test_sample(project_id, dataset_id, capsys, to_delete): - config_name = create_scheduled_query.sample_create_transfer_config( - project_id, dataset_id - ) - to_delete.append(config_name) - out, err = capsys.readouterr() - assert config_name in out diff --git a/bigquery_datatransfer/setup.cfg b/bigquery_datatransfer/setup.cfg deleted file mode 100644 index 3bd555500e37..000000000000 --- a/bigquery_datatransfer/setup.cfg +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[bdist_wheel] -universal = 1 diff --git a/bigquery_datatransfer/setup.py b/bigquery_datatransfer/setup.py deleted file mode 100644 index 49a6456283ce..000000000000 --- a/bigquery_datatransfer/setup.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import io -import os - -import setuptools - - -# Package metadata. - -name = "google-cloud-bigquery-datatransfer" -description = "BigQuery Data Transfer API client library" -version = "0.4.1" -# Should be one of: -# 'Development Status :: 3 - Alpha' -# 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Production/Stable' -release_status = "Development Status :: 3 - Alpha" -dependencies = ["google-api-core[grpc] >= 1.14.0, < 2.0.0dev"] -extras = {} - - -# Setup boilerplate below this line. - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -# Only include packages under the 'google' namespace. Do not include tests, -# benchmarks, etc. -packages = [ - package for package in setuptools.find_packages() if package.startswith("google") -] - -# Determine which namespaces are needed. 
-namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url="https://github.com/GoogleCloudPlatform/google-cloud-python", - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - namespace_packages=namespaces, - install_requires=dependencies, - extras_require=extras, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", - include_package_data=True, - zip_safe=False, -) diff --git a/bigquery_datatransfer/synth.metadata b/bigquery_datatransfer/synth.metadata deleted file mode 100644 index a0978e1250ea..000000000000 --- a/bigquery_datatransfer/synth.metadata +++ /dev/null @@ -1,189 +0,0 @@ -{ - "updateTime": "2020-01-30T13:14:05.045150Z", - "sources": [ - { - "generator": { - "name": "artman", - "version": "0.44.4", - "dockerImage": "googleapis/artman@sha256:19e945954fc960a4bdfee6cb34695898ab21a8cf0bac063ee39b91f00a1faec8" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "c1246a29e22b0f98e800a536b5b0da2d933a55f2", - "internalRef": "292310790", - "log": "c1246a29e22b0f98e800a536b5b0da2d933a55f2\nUpdating v1 protos with the latest inline documentation (in comments) and config options. 
Also adding a per-service .yaml file.\n\nPiperOrigin-RevId: 292310790\n\nb491d07cadaae7cde5608321f913e5ca1459b32d\nRevert accidental local_repository change\n\nPiperOrigin-RevId: 292245373\n\naf3400a8cb6110025198b59a0f7d018ae3cda700\nUpdate gapic-generator dependency (prebuilt PHP binary support).\n\nPiperOrigin-RevId: 292243997\n\n341fd5690fae36f36cf626ef048fbcf4bbe7cee6\ngrafeas: v1 add resource_definition for the grafeas.io/Project and change references for Project.\n\nPiperOrigin-RevId: 292221998\n\n42e915ec2ece1cd37a590fbcd10aa2c0fb0e5b06\nUpdate the gapic-generator, protoc-java-resource-name-plugin and protoc-docs-plugin to the latest commit.\n\nPiperOrigin-RevId: 292182368\n\nf035f47250675d31492a09f4a7586cfa395520a7\nFix grafeas build and update build.sh script to include gerafeas.\n\nPiperOrigin-RevId: 292168753\n\n26ccb214b7bc4a716032a6266bcb0a9ca55d6dbb\nasset: v1p1beta1 add client config annotations and retry config\n\nPiperOrigin-RevId: 292154210\n\n974ee5c0b5d03e81a50dafcedf41e0efebb5b749\nasset: v1beta1 add client config annotations\n\nPiperOrigin-RevId: 292152573\n\n" - } - }, - { - "template": { - "name": "python_library", - "origin": "synthtool.gcp", - "version": "2019.10.17" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "bigquery_datatransfer", - "apiVersion": "v1", - "language": "python", - "generator": "gapic", - "config": "google/cloud/bigquery/datatransfer/artman_bigquerydatatransfer.yaml" - } - } - ], - "newFiles": [ - { - "path": ".coveragerc" - }, - { - "path": ".flake8" - }, - { - "path": ".repo-metadata.json" - }, - { - "path": "CHANGELOG.md" - }, - { - "path": "LICENSE" - }, - { - "path": "MANIFEST.in" - }, - { - "path": "README.rst" - }, - { - "path": "docs/README.rst" - }, - { - "path": "docs/_static/custom.css" - }, - { - "path": "docs/_templates/layout.html" - }, - { - "path": "docs/changelog.md" - }, - { - "path": "docs/conf.py" - }, - { - "path": "docs/gapic/v1/api.rst" - }, - { - "path": "docs/gapic/v1/types.rst" - }, - { - "path": "docs/index.rst" - }, - { - "path": "google/__init__.py" - }, - { - "path": "google/cloud/__init__.py" - }, - { - "path": "google/cloud/bigquery_datatransfer.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/__init__.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/gapic/__init__.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/gapic/enums.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/gapic/transports/__init__.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/proto/__init__.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/proto/datasource.proto" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2_grpc.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2_grpc.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/proto/transfer.proto" - }, - { - "path": 
"google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2_grpc.py" - }, - { - "path": "google/cloud/bigquery_datatransfer_v1/types.py" - }, - { - "path": "noxfile.py" - }, - { - "path": "samples/__init__.py" - }, - { - "path": "samples/create_scheduled_query.py" - }, - { - "path": "samples/requirements.txt" - }, - { - "path": "samples/tests/__init__.py" - }, - { - "path": "samples/tests/test_create_scheduled_query.py" - }, - { - "path": "setup.cfg" - }, - { - "path": "setup.py" - }, - { - "path": "synth.metadata" - }, - { - "path": "synth.py" - }, - { - "path": "tests/system/gapic/v1/test_system_data_transfer_service_v1.py" - }, - { - "path": "tests/unit/gapic/v1/test_data_transfer_service_client_v1.py" - }, - { - "path": "tests/unit/test_shim.py" - } - ] -} \ No newline at end of file diff --git a/bigquery_datatransfer/synth.py b/bigquery_datatransfer/synth.py deleted file mode 100644 index 9a8acff4369c..000000000000 --- a/bigquery_datatransfer/synth.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This script is used to synthesize generated parts of this library.""" - -import synthtool as s -from synthtool import gcp - -gapic = gcp.GAPICGenerator() -common = gcp.CommonTemplates() -version = "v1" - -# ---------------------------------------------------------------------------- -# Generate bigquery_datatransfer GAPIC layer -# ---------------------------------------------------------------------------- -library = gapic.py_library( - "bigquery_datatransfer", - version, - config_path="/google/cloud/bigquery/datatransfer/" - "artman_bigquerydatatransfer.yaml", - artman_output_name="bigquerydatatransfer-v1", - include_protos=True, -) - -s.move( - library, - excludes=["docs/conf.py", "docs/index.rst", "README.rst", "nox.py", "setup.py"], -) - -s.replace( - [ - "google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py", - "google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2_grpc.py", - ], - "from google.cloud.bigquery.datatransfer_v1.proto", - "from google.cloud.bigquery_datatransfer_v1.proto", -) - -s.replace( - "google/cloud/bigquery_datatransfer_v1/gapic/" "data_transfer_service_client.py", - "google-cloud-bigquerydatatransfer", - "google-cloud-bigquery-datatransfer", -) - -s.replace( - "google/cloud/bigquery_datatransfer_v1/gapic/" "data_transfer_service_client.py", - "import google.api_core.gapic_v1.method\n", - "\g<0>import google.api_core.path_template\n", -) - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=79, cov_level=79, samples_test=True) -s.move(templated_files) - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/bigquery_datatransfer/tests/system/gapic/v1/test_system_data_transfer_service_v1.py 
b/bigquery_datatransfer/tests/system/gapic/v1/test_system_data_transfer_service_v1.py deleted file mode 100644 index 24fc2182dd2f..000000000000 --- a/bigquery_datatransfer/tests/system/gapic/v1/test_system_data_transfer_service_v1.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import time - -from google.cloud import bigquery_datatransfer_v1 -from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2 - - -class TestSystemDataTransferService(object): - def test_list_data_sources(self): - project_id = os.environ["PROJECT_ID"] - - client = bigquery_datatransfer_v1.DataTransferServiceClient() - parent = client.location_path(project_id, "us-central1") - response = client.list_data_sources(parent) diff --git a/bigquery_datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py b/bigquery_datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py deleted file mode 100644 index 86fed1b28a85..000000000000 --- a/bigquery_datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py +++ /dev/null @@ -1,731 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import bigquery_datatransfer_v1 -from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2 -from google.cloud.bigquery_datatransfer_v1.proto import transfer_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestDataTransferServiceClient(object): - def test_get_data_source(self): - # Setup Expected Response - name_2 = "name2-1052831874" - data_source_id = "dataSourceId-1015796374" - display_name = "displayName1615086568" - description = "description-1724546052" - client_id = "clientId-1904089585" - supports_multiple_transfers = True - update_deadline_seconds = 991471694 - default_schedule = "defaultSchedule-800168235" - supports_custom_schedule = True - help_url = "helpUrl-789431439" - default_data_refresh_window_days = 1804935157 - manual_runs_disabled = True - expected_response = { - "name": name_2, - "data_source_id": data_source_id, - "display_name": display_name, - "description": description, - "client_id": client_id, - "supports_multiple_transfers": supports_multiple_transfers, - "update_deadline_seconds": update_deadline_seconds, - "default_schedule": default_schedule, - "supports_custom_schedule": supports_custom_schedule, - "help_url": help_url, - "default_data_refresh_window_days": default_data_refresh_window_days, - "manual_runs_disabled": manual_runs_disabled, - } - expected_response = datatransfer_pb2.DataSource(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_data_source_path("[PROJECT]", "[DATA_SOURCE]") - - response = client.get_data_source(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.GetDataSourceRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_data_source_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_data_source_path("[PROJECT]", "[DATA_SOURCE]") - - with pytest.raises(CustomException): - 
client.get_data_source(name) - - def test_list_data_sources(self): - # Setup Expected Response - next_page_token = "" - data_sources_element = {} - data_sources = [data_sources_element] - expected_response = { - "next_page_token": next_page_token, - "data_sources": data_sources, - } - expected_response = datatransfer_pb2.ListDataSourcesResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_data_sources(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.data_sources[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.ListDataSourcesRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_data_sources_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_data_sources(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_create_transfer_config(self): - # Setup Expected Response - name = "name3373707" - destination_dataset_id = "destinationDatasetId1541564179" - display_name = "displayName1615086568" - data_source_id = "dataSourceId-1015796374" - schedule = "schedule-697920873" - data_refresh_window_days = 327632845 - disabled = True - user_id = 147132913 - dataset_region = "datasetRegion959248539" - notification_pubsub_topic = "notificationPubsubTopic1794281191" - expected_response = { - "name": name, - "destination_dataset_id": destination_dataset_id, - "display_name": display_name, - "data_source_id": data_source_id, - "schedule": schedule, - "data_refresh_window_days": data_refresh_window_days, - "disabled": disabled, - "user_id": user_id, - "dataset_region": dataset_region, - "notification_pubsub_topic": notification_pubsub_topic, - } - expected_response = transfer_pb2.TransferConfig(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - transfer_config = {} - - response = client.create_transfer_config(parent, transfer_config) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.CreateTransferConfigRequest( - parent=parent, transfer_config=transfer_config - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_transfer_config_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = 
bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - transfer_config = {} - - with pytest.raises(CustomException): - client.create_transfer_config(parent, transfer_config) - - def test_update_transfer_config(self): - # Setup Expected Response - name = "name3373707" - destination_dataset_id = "destinationDatasetId1541564179" - display_name = "displayName1615086568" - data_source_id = "dataSourceId-1015796374" - schedule = "schedule-697920873" - data_refresh_window_days = 327632845 - disabled = True - user_id = 147132913 - dataset_region = "datasetRegion959248539" - notification_pubsub_topic = "notificationPubsubTopic1794281191" - expected_response = { - "name": name, - "destination_dataset_id": destination_dataset_id, - "display_name": display_name, - "data_source_id": data_source_id, - "schedule": schedule, - "data_refresh_window_days": data_refresh_window_days, - "disabled": disabled, - "user_id": user_id, - "dataset_region": dataset_region, - "notification_pubsub_topic": notification_pubsub_topic, - } - expected_response = transfer_pb2.TransferConfig(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - transfer_config = {} - update_mask = {} - - response = client.update_transfer_config(transfer_config, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.UpdateTransferConfigRequest( - transfer_config=transfer_config, update_mask=update_mask - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_transfer_config_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - transfer_config = {} - update_mask = {} - - with pytest.raises(CustomException): - client.update_transfer_config(transfer_config, update_mask) - - def test_delete_transfer_config(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - - client.delete_transfer_config(name) - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.DeleteTransferConfigRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_transfer_config_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - - with pytest.raises(CustomException): - client.delete_transfer_config(name) - - def test_get_transfer_config(self): - # Setup 
Expected Response - name_2 = "name2-1052831874" - destination_dataset_id = "destinationDatasetId1541564179" - display_name = "displayName1615086568" - data_source_id = "dataSourceId-1015796374" - schedule = "schedule-697920873" - data_refresh_window_days = 327632845 - disabled = True - user_id = 147132913 - dataset_region = "datasetRegion959248539" - notification_pubsub_topic = "notificationPubsubTopic1794281191" - expected_response = { - "name": name_2, - "destination_dataset_id": destination_dataset_id, - "display_name": display_name, - "data_source_id": data_source_id, - "schedule": schedule, - "data_refresh_window_days": data_refresh_window_days, - "disabled": disabled, - "user_id": user_id, - "dataset_region": dataset_region, - "notification_pubsub_topic": notification_pubsub_topic, - } - expected_response = transfer_pb2.TransferConfig(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - - response = client.get_transfer_config(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.GetTransferConfigRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_transfer_config_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - - with pytest.raises(CustomException): - client.get_transfer_config(name) - - def test_list_transfer_configs(self): - # Setup Expected Response - next_page_token = "" - transfer_configs_element = {} - transfer_configs = [transfer_configs_element] - expected_response = { - "next_page_token": next_page_token, - "transfer_configs": transfer_configs, - } - expected_response = datatransfer_pb2.ListTransferConfigsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_transfer_configs(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.transfer_configs[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.ListTransferConfigsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_transfer_configs_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") 
- - paged_list_response = client.list_transfer_configs(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_schedule_transfer_runs(self): - # Setup Expected Response - expected_response = {} - expected_response = datatransfer_pb2.ScheduleTransferRunsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - parent = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - start_time = {} - end_time = {} - - response = client.schedule_transfer_runs(parent, start_time, end_time) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.ScheduleTransferRunsRequest( - parent=parent, start_time=start_time, end_time=end_time - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_schedule_transfer_runs_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - parent = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - start_time = {} - end_time = {} - - with pytest.raises(CustomException): - client.schedule_transfer_runs(parent, start_time, end_time) - - def test_get_transfer_run(self): - # Setup Expected Response - name_2 = "name2-1052831874" - destination_dataset_id = "destinationDatasetId1541564179" - data_source_id = "dataSourceId-1015796374" - user_id = 147132913 - schedule = "schedule-697920873" - notification_pubsub_topic = "notificationPubsubTopic1794281191" - expected_response = { - "name": name_2, - "destination_dataset_id": destination_dataset_id, - "data_source_id": data_source_id, - "user_id": user_id, - "schedule": schedule, - "notification_pubsub_topic": notification_pubsub_topic, - } - expected_response = transfer_pb2.TransferRun(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") - - response = client.get_transfer_run(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.GetTransferRunRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_transfer_run_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") - - with pytest.raises(CustomException): - client.get_transfer_run(name) - - def test_delete_transfer_run(self): - channel = ChannelStub() - patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") - - client.delete_transfer_run(name) - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.DeleteTransferRunRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_transfer_run_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") - - with pytest.raises(CustomException): - client.delete_transfer_run(name) - - def test_list_transfer_runs(self): - # Setup Expected Response - next_page_token = "" - transfer_runs_element = {} - transfer_runs = [transfer_runs_element] - expected_response = { - "next_page_token": next_page_token, - "transfer_runs": transfer_runs, - } - expected_response = datatransfer_pb2.ListTransferRunsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - parent = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - - paged_list_response = client.list_transfer_runs(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.transfer_runs[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.ListTransferRunsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_transfer_runs_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - parent = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - - paged_list_response = client.list_transfer_runs(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_list_transfer_logs(self): - # Setup Expected Response - next_page_token = "" - transfer_messages_element = {} - transfer_messages = [transfer_messages_element] - expected_response = { - "next_page_token": next_page_token, - "transfer_messages": transfer_messages, - } - expected_response = datatransfer_pb2.ListTransferLogsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - parent = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") - - paged_list_response = client.list_transfer_logs(parent) - resources = 
list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.transfer_messages[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.ListTransferLogsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_transfer_logs_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - parent = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") - - paged_list_response = client.list_transfer_logs(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_check_valid_creds(self): - # Setup Expected Response - has_valid_creds = False - expected_response = {"has_valid_creds": has_valid_creds} - expected_response = datatransfer_pb2.CheckValidCredsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_data_source_path("[PROJECT]", "[DATA_SOURCE]") - - response = client.check_valid_creds(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.CheckValidCredsRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_check_valid_creds_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_data_source_path("[PROJECT]", "[DATA_SOURCE]") - - with pytest.raises(CustomException): - client.check_valid_creds(name) - - def test_start_manual_transfer_runs(self): - # Setup Expected Response - expected_response = {} - expected_response = datatransfer_pb2.StartManualTransferRunsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - response = client.start_manual_transfer_runs() - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.StartManualTransferRunsRequest() - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_start_manual_transfer_runs_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - with pytest.raises(CustomException): - client.start_manual_transfer_runs() diff --git a/bigquery_datatransfer/tests/unit/test_shim.py b/bigquery_datatransfer/tests/unit/test_shim.py 
deleted file mode 100644 index c27963bce4ab..000000000000 --- a/bigquery_datatransfer/tests/unit/test_shim.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests for versionless import.""" - - -def test_shim(): - from google.cloud import bigquery_datatransfer - from google.cloud import bigquery_datatransfer_v1 - - assert bigquery_datatransfer.__all__ == bigquery_datatransfer_v1.__all__ - - for name in bigquery_datatransfer.__all__: - found = getattr(bigquery_datatransfer, name) - expected = getattr(bigquery_datatransfer_v1, name) - assert found is expected diff --git a/bigquery_storage/.coveragerc b/bigquery_storage/.coveragerc deleted file mode 100644 index b178b094aa1d..000000000000 --- a/bigquery_storage/.coveragerc +++ /dev/null @@ -1,19 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[run] -branch = True - -[report] -fail_under = 100 -show_missing = True -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file diff --git a/bigquery_storage/.flake8 b/bigquery_storage/.flake8 deleted file mode 100644 index 0268ecc9c55c..000000000000 --- a/bigquery_storage/.flake8 +++ /dev/null @@ -1,14 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - *_pb2.py - - # Standard linting exemptions. 
- __pycache__, - .git, - *.pyc, - conf.py diff --git a/bigquery_storage/.gitignore b/bigquery_storage/.gitignore deleted file mode 100644 index 9e3a5f25770c..000000000000 --- a/bigquery_storage/.gitignore +++ /dev/null @@ -1 +0,0 @@ -docs/_build \ No newline at end of file diff --git a/bigquery_storage/.repo-metadata.json b/bigquery_storage/.repo-metadata.json deleted file mode 100644 index a7f9c207bb7c..000000000000 --- a/bigquery_storage/.repo-metadata.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "bigquerystorage", - "name_pretty": "Google BigQuery Storage", - "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage/", - "client_documentation": "https://googleapis.dev/python/bigquerystorage/latest", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", - "release_level": "beta", - "language": "python", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-bigquery-storage", - "api_id": "bigquerystorage.googleapis.com", - "requires_billing": true -} \ No newline at end of file diff --git a/bigquery_storage/CHANGELOG.md b/bigquery_storage/CHANGELOG.md deleted file mode 100644 index f2fb6237e2b6..000000000000 --- a/bigquery_storage/CHANGELOG.md +++ /dev/null @@ -1,140 +0,0 @@ -# Changelog - -[PyPI History][1] - -[1]: https://pypi.org/project/google-cloud-bigquery-storage/#history - -## 0.7.0 - -07-31-2019 17:48 PDT - - -### New Features -- Support faster Arrow data format in `to_dataframe` and `to_arrow` when using BigQuery Storage API. ([#8551](https://github.com/googleapis/google-cloud-python/pull/8551)) - -### Dependencies -- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) -- Update pins of 'googleapis-common-protos. ([#8688](https://github.com/googleapis/google-cloud-python/pull/8688)) - -### Documentation -- Update quickstart sample with data format and sharding options. ([#8665](https://github.com/googleapis/google-cloud-python/pull/8665)) -- Fix links to bigquery storage documentation. ([#8859](https://github.com/googleapis/google-cloud-python/pull/8859)) -- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) -- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) - -### Internal / Testing Changes -- Pin black version. (via synth). ([#8672](https://github.com/googleapis/google-cloud-python/pull/8672)) - -## 0.6.0 - -07-11-2019 13:15 PDT - -### New Features - -- Add `to_arrow` with support for Arrow data format. ([#8644](https://github.com/googleapis/google-cloud-python/pull/8644)) -- Add 'client_options' support (via synth). ([#8536](https://github.com/googleapis/google-cloud-python/pull/8536)) -- Add sharding strategy, stream splitting, Arrow support (via synth). ([#8477](https://github.com/googleapis/google-cloud-python/pull/8477)) - -### Documentation - -- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) - -### Internal / Testing Changes - -- Allow kwargs to be passed to create_channel (via synth). ([#8441](https://github.com/googleapis/google-cloud-python/pull/8441)) -- Add encoding declaration to protoc-generated files (via synth). ([#8345](https://github.com/googleapis/google-cloud-python/pull/8345)) -- Refactor `reader.ReadRowsPage` to use `_StreamParser`. 
([#8262](https://github.com/googleapis/google-cloud-python/pull/8262)) -- Fix coverage in 'types.py' (via synth). ([#8148](https://github.com/googleapis/google-cloud-python/pull/8148)) -- Add empty lines, remove coverage exclusions (via synth). ([#8051](https://github.com/googleapis/google-cloud-python/pull/8051)) - -## 0.5.0 - -05-20-2019 09:23 PDT - -### Implementation Changes - -- Increase default deadline on ReadRows. ([#8030](https://github.com/googleapis/google-cloud-python/pull/8030)) -- Respect timeout on `client.read_rows`. Don't resume on `DEADLINE_EXCEEDED` errors. ([#8025](https://github.com/googleapis/google-cloud-python/pull/8025)) - -### Documentation - -- Use alabaster theme everwhere. ([#8021](https://github.com/googleapis/google-cloud-python/pull/8021)) - -## 0.4.0 - -04-16-2019 13:46 PDT - -### Implementation Changes - -- Remove gRPC size limit in the transport options ([#7664](https://github.com/googleapis/google-cloud-python/pull/7664)) -- Add retry params for create_read_session (via synth). ([#7658](https://github.com/googleapis/google-cloud-python/pull/7658)) - -### New Features - -- Add page iterator to ReadRowsStream ([#7680](https://github.com/googleapis/google-cloud-python/pull/7680)) - -### Internal / Testing Changes - -- Remove system test for split rows ([#7673](https://github.com/googleapis/google-cloud-python/pull/7673)) - -## 0.3.0 - -04-02-2019 15:22 PDT - -### Dependencies - -- Add dependency for resource proto. ([#7585](https://github.com/googleapis/google-cloud-python/pull/7585)) -- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) - -### Documentation - -- Fix links to BigQuery Storage API docs ([#7647](https://github.com/googleapis/google-cloud-python/pull/7647)) -- Update proto / docstrings (via synth). ([#7461](https://github.com/googleapis/google-cloud-python/pull/7461)) -- googlecloudplatform --> googleapis in READMEs ([#7411](https://github.com/googleapis/google-cloud-python/pull/7411)) -- Updated client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) -- Blacken new quickstart snippet. ([#7242](https://github.com/googleapis/google-cloud-python/pull/7242)) -- Add quickstart demonstrating most BQ Storage API read features ([#7223](https://github.com/googleapis/google-cloud-python/pull/7223)) -- Add bigquery_storage to docs ([#7222](https://github.com/googleapis/google-cloud-python/pull/7222)) - -### Internal / Testing Changes - -- Add protos as an artifact to library ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) -- Copy lintified proto files (via synth). ([#7475](https://github.com/googleapis/google-cloud-python/pull/7475)) -- Add annotations to protocol buffers indicating request parameters (via synth). ([#7550](https://github.com/googleapis/google-cloud-python/pull/7550)) - -## 0.2.0 - -01-25-2019 13:54 PST - -### New Features - -- Add option to choose dtypes by column in to_dataframe. ([#7126](https://github.com/googleapis/google-cloud-python/pull/7126)) - -### Internal / Testing Changes - -- Update copyright headers -- Protoc-generated serialization update. ([#7076](https://github.com/googleapis/google-cloud-python/pull/7076)) -- BigQuery Storage: run 'blacken' during synth ([#7047](https://github.com/googleapis/google-cloud-python/pull/7047)) - -## 0.1.1 - -12-17-2018 18:03 PST - - -### Implementation Changes -- Import `iam.policy` from `google.api_core`. 
([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) -- Pick up fixes in GAPIC generator. ([#6708](https://github.com/googleapis/google-cloud-python/pull/6708)) - -### Documentation -- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) - -### Internal / Testing Changes -- Blacken all gen'd libs ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) -- Correct release_status for bigquery_storage ([#6767](https://github.com/googleapis/google-cloud-python/pull/6767)) - -## 0.1.0 - -11-29-2018 13:45 PST - -- Initial release of BigQuery Storage API client. - diff --git a/bigquery_storage/LICENSE b/bigquery_storage/LICENSE deleted file mode 100644 index a8ee855de2aa..000000000000 --- a/bigquery_storage/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - https://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/bigquery_storage/MANIFEST.in b/bigquery_storage/MANIFEST.in deleted file mode 100644 index 9cbf175afe6b..000000000000 --- a/bigquery_storage/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include README.rst LICENSE -recursive-include google *.json *.proto -recursive-include tests * -global-exclude *.py[co] -global-exclude __pycache__ diff --git a/bigquery_storage/README.rst b/bigquery_storage/README.rst deleted file mode 100644 index 6b600ca0bd67..000000000000 --- a/bigquery_storage/README.rst +++ /dev/null @@ -1,102 +0,0 @@ -Python Client for BigQuery Storage API (`Beta`_) -================================================= - - - -`BigQuery Storage API`_: - -- `Client Library Documentation`_ -- `Product Documentation`_ - -.. _Beta: https://github.com/googleapis/google-cloud-python/blob/master/README.rst -.. _BigQuery Storage API: https://cloud.google.com/bigquery/docs/reference/storage/ -.. _Client Library Documentation: https://googleapis.dev/python/bigquerystorage/latest -.. _Product Documentation: https://cloud.google.com/bigquery/docs/reference/storage/ - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. `Enable the BigQuery Storage API.`_ -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the BigQuery Storage API.: https://console.cloud.google.com/apis/library/bigquerystorage.googleapis.com -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. 
_`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Supported Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.5 - -Deprecated Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv - source /bin/activate - /bin/pip install google-cloud-bigquery-storage - - -Windows -^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-bigquery-storage - -Optional Dependencies -^^^^^^^^^^^^^^^^^^^^^ - -Several features of ``google-cloud-bigquery-storage`` require additional -dependencies. - -* Parse Avro blocks in a ``read_rows()`` stream using `fastavro - `_. - - ``pip install google-cloud-bigquery-storage[fastavro]`` - -* Write rows to a `pandas `_ - dataframe. - - ``pip install google-cloud-bigquery-storage[pandas,fastavro]`` - -Next Steps -~~~~~~~~~~ - -- Read the `Client Library Documentation`_ for BigQuery Storage API - API to see other available methods on the client. -- Read the `BigQuery Storage API Product documentation`_ to learn - more about the product and see How-to Guides. -- View this `repository’s main README`_ to see the full list of Cloud - APIs that we cover. - -.. _BigQuery Storage API Product documentation: https://cloud.google.com/bigquery/docs/reference/storage/ -.. _repository’s main README: https://github.com/googleapis/google-cloud-python/blob/master/README.rst diff --git a/bigquery_storage/docs/README.rst b/bigquery_storage/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/bigquery_storage/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/bigquery_storage/docs/_static/custom.css b/bigquery_storage/docs/_static/custom.css deleted file mode 100644 index 0abaf229fce3..000000000000 --- a/bigquery_storage/docs/_static/custom.css +++ /dev/null @@ -1,4 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} \ No newline at end of file diff --git a/bigquery_storage/docs/_templates/layout.html b/bigquery_storage/docs/_templates/layout.html deleted file mode 100644 index 228529efe2d2..000000000000 --- a/bigquery_storage/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@ - -{% extends "!layout.html" %} -{%- block content %} -{%- if theme_fixed_sidebar|lower == 'true' %} -
- {{ sidebar() }} - {%- block document %} -
- {%- if render_sidebar %} -
- {%- endif %} - - {%- block relbar_top %} - {%- if theme_show_relbar_top|tobool %} - - {%- endif %} - {% endblock %} - -
-
- On January 1, 2020 this library will no longer support Python 2 on the latest released version. - Previously released library versions will continue to be available. For more information please - visit Python 2 support on Google Cloud. -
- {% block body %} {% endblock %} -
- - {%- block relbar_bottom %} - {%- if theme_show_relbar_bottom|tobool %} - - {%- endif %} - {% endblock %} - - {%- if render_sidebar %} -
- {%- endif %} -
- {%- endblock %} -
-
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/bigquery_storage/docs/conf.py b/bigquery_storage/docs/conf.py deleted file mode 100644 index a4bc54f7aa71..000000000000 --- a/bigquery_storage/docs/conf.py +++ /dev/null @@ -1,327 +0,0 @@ -# -*- coding: utf-8 -*- -# -# google-cloud-bigquerystorage documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = ".rst" - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-bigquerystorage" -copyright = u"2017, Google" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). 
-# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). 
-# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bigquerystorage-doc" - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - #'preamble': '', - # Latex figure (float) alignment - #'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-bigquerystorage.tex", - u"google-cloud-bigquerystorage Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-bigquerystorage", - u"google-cloud-bigquerystorage Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-bigquerystorage", - u"google-cloud-bigquerystorage Documentation", - author, - "google-cloud-bigquerystorage", - "GAPIC library for the {metadata.shortName} v1beta1 service", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), - "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), -} - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/bigquery_storage/docs/gapic/v1beta1/api.rst b/bigquery_storage/docs/gapic/v1beta1/api.rst deleted file mode 100644 index d4df98557e15..000000000000 --- a/bigquery_storage/docs/gapic/v1beta1/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for BigQuery Storage API -=============================== - -.. automodule:: google.cloud.bigquery_storage_v1beta1 - :members: - :inherited-members: \ No newline at end of file diff --git a/bigquery_storage/docs/gapic/v1beta1/reader.rst b/bigquery_storage/docs/gapic/v1beta1/reader.rst deleted file mode 100644 index 5b6af828f53e..000000000000 --- a/bigquery_storage/docs/gapic/v1beta1/reader.rst +++ /dev/null @@ -1,6 +0,0 @@ -Reader for BigQuery Storage API -=============================== - -.. automodule:: google.cloud.bigquery_storage_v1beta1.reader - :members: - :inherited-members: diff --git a/bigquery_storage/docs/gapic/v1beta1/types.rst b/bigquery_storage/docs/gapic/v1beta1/types.rst deleted file mode 100644 index a36210a64e52..000000000000 --- a/bigquery_storage/docs/gapic/v1beta1/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for BigQuery Storage API Client -===================================== - -.. automodule:: google.cloud.bigquery_storage_v1beta1.types - :members: \ No newline at end of file diff --git a/bigquery_storage/docs/index.rst b/bigquery_storage/docs/index.rst deleted file mode 100644 index f34b0cfb4e5a..000000000000 --- a/bigquery_storage/docs/index.rst +++ /dev/null @@ -1,19 +0,0 @@ -.. include:: README.rst - -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - gapic/v1beta1/api - gapic/v1beta1/reader - gapic/v1beta1/types - -Example Usage -------------- - -.. literalinclude:: samples/quickstart.py - :language: python - :dedent: 4 - :start-after: [START bigquerystorage_quickstart] - :end-before: [END bigquerystorage_quickstart] diff --git a/bigquery_storage/docs/samples b/bigquery_storage/docs/samples deleted file mode 120000 index e804737ed3a9..000000000000 --- a/bigquery_storage/docs/samples +++ /dev/null @@ -1 +0,0 @@ -../samples \ No newline at end of file diff --git a/bigquery_storage/google/__init__.py b/bigquery_storage/google/__init__.py deleted file mode 100644 index 8fcc60e2b9c6..000000000000 --- a/bigquery_storage/google/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/bigquery_storage/google/cloud/__init__.py b/bigquery_storage/google/cloud/__init__.py deleted file mode 100644 index 8fcc60e2b9c6..000000000000 --- a/bigquery_storage/google/cloud/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/bigquery_storage/google/cloud/bigquery_storage.py b/bigquery_storage/google/cloud/bigquery_storage.py deleted file mode 100644 index 8613643ee25d..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import - -from google.cloud.bigquery_storage_v1beta1 import BigQueryStorageClient -from google.cloud.bigquery_storage_v1beta1 import enums -from google.cloud.bigquery_storage_v1beta1 import types - - -__all__ = ("enums", "types", "BigQueryStorageClient") diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/__init__.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/__init__.py deleted file mode 100644 index e355a0f6b8a5..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/__init__.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import - -import pkg_resources - -__version__ = pkg_resources.get_distribution( - "google-cloud-bigquery-storage" -).version # noqa - -from google.cloud.bigquery_storage_v1beta1 import types -from google.cloud.bigquery_storage_v1beta1 import client -from google.cloud.bigquery_storage_v1beta1.gapic import enums - - -class BigQueryStorageClient(client.BigQueryStorageClient): - __doc__ = client.BigQueryStorageClient.__doc__ - enums = enums - - -__all__ = ( - # google.cloud.bigquery_storage_v1beta1 - "__version__", - "types", - # google.cloud.bigquery_storage_v1beta1.client - "BigQueryStorageClient", - # google.cloud.bigquery_storage_v1beta1.gapic - "enums", -) diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/client.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/client.py deleted file mode 100644 index 4f53a30550a8..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/client.py +++ /dev/null @@ -1,126 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Parent client for calling the Cloud BigQuery Storage API. - -This is the base from which all interactions with the API occur. -""" - -from __future__ import absolute_import - -import google.api_core.gapic_v1.method - -from google.cloud.bigquery_storage_v1beta1 import reader -from google.cloud.bigquery_storage_v1beta1.gapic import big_query_storage_client # noqa - - -_SCOPES = ( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/cloud-platform", -) - - -class BigQueryStorageClient(big_query_storage_client.BigQueryStorageClient): - """Client for interacting with BigQuery Storage API. - - The BigQuery storage API can be used to read data stored in BigQuery. - """ - - def read_rows( - self, - read_position, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Reads rows from the table in the format prescribed by the read - session. Each response contains one or more table rows, up to a - maximum of 10 MiB per response; read requests which attempt to read - individual rows larger than this will fail. - - Each request also returns a set of stream statistics reflecting the - estimated total number of rows in the read stream. This number is - computed based on the total table size and the number of active - streams in the read session, and may change as other streams continue - to read data. - - Example: - >>> from google.cloud import bigquery_storage_v1beta1 - >>> - >>> client = bigquery_storage_v1beta1.BigQueryStorageClient() - >>> - >>> # TODO: Initialize ``table_reference``: - >>> table_reference = { - ... 'project_id': 'your-data-project-id', - ... 'dataset_id': 'your_dataset_id', - ... 'table_id': 'your_table_id', - ... 
} - >>> - >>> # TODO: Initialize `parent`: - >>> parent = 'projects/your-billing-project-id' - >>> - >>> session = client.create_read_session(table_reference, parent) - >>> read_position = bigquery_storage_v1beta1.types.StreamPosition( - ... stream=session.streams[0], # TODO: Read the other streams. - ... ) - >>> - >>> for element in client.read_rows(read_position): - ... # process element - ... pass - - Args: - read_position (Union[ \ - dict, \ - ~google.cloud.bigquery_storage_v1beta1.types.StreamPosition \ - ]): - Required. Identifier of the position in the stream to start - reading from. The offset requested must be less than the last - row read from ReadRows. Requesting a larger offset is - undefined. If a dict is provided, it must be of the same form - as the protobuf message - :class:`~google.cloud.bigquery_storage_v1beta1.types.StreamPosition` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - ~google.cloud.bigquery_storage_v1beta1.reader.ReadRowsStream: - An iterable of - :class:`~google.cloud.bigquery_storage_v1beta1.types.ReadRowsResponse`. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - gapic_client = super(BigQueryStorageClient, self) - stream = gapic_client.read_rows( - read_position, retry=retry, timeout=timeout, metadata=metadata - ) - return reader.ReadRowsStream( - stream, - gapic_client, - read_position, - {"retry": retry, "timeout": timeout, "metadata": metadata}, - ) diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/__init__.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py deleted file mode 100644 index 95e08647313f..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py +++ /dev/null @@ -1,671 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.cloud.bigquery.storage.v1beta1 BigQueryStorage API.""" - -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.path_template -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import grpc - -from google.cloud.bigquery_storage_v1beta1.gapic import big_query_storage_client_config -from google.cloud.bigquery_storage_v1beta1.gapic import enums -from google.cloud.bigquery_storage_v1beta1.gapic.transports import ( - big_query_storage_grpc_transport, -) -from google.cloud.bigquery_storage_v1beta1.proto import read_options_pb2 -from google.cloud.bigquery_storage_v1beta1.proto import storage_pb2 -from google.cloud.bigquery_storage_v1beta1.proto import storage_pb2_grpc -from google.cloud.bigquery_storage_v1beta1.proto import table_reference_pb2 -from google.protobuf import empty_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-bigquery-storage" -).version - - -class BigQueryStorageClient(object): - """ - BigQuery storage API. - - The BigQuery storage API can be used to read data stored in BigQuery. - """ - - SERVICE_ADDRESS = "bigquerystorage.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BigQueryStorageClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.BigQueryStorageGrpcTransport, - Callable[[~.Credentials, type], ~.BigQueryStorageGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. 
A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = big_query_storage_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: # pragma: no cover - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=big_query_storage_grpc_transport.BigQueryStorageGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = big_query_storage_grpc_transport.BigQueryStorageGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_read_session( - self, - table_reference, - parent, - table_modifiers=None, - requested_streams=None, - read_options=None, - format_=None, - sharding_strategy=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a new read session. A read session divides the contents of a - BigQuery table into one or more streams, which can then be used to read - data from the table. The read session also specifies properties of the - data to be read, such as a list of columns or a push-down filter describing - the rows to be returned. - - A particular row can be read by at most one stream. 
When the caller has - reached the end of each stream in the session, then all the data in the - table has been read. - - Read sessions automatically expire 24 hours after they are created and do - not require manual clean-up by the caller. - - Example: - >>> from google.cloud import bigquery_storage_v1beta1 - >>> - >>> client = bigquery_storage_v1beta1.BigQueryStorageClient() - >>> - >>> # TODO: Initialize `table_reference`: - >>> table_reference = {} - >>> - >>> # TODO: Initialize `parent`: - >>> parent = '' - >>> - >>> response = client.create_read_session(table_reference, parent) - - Args: - table_reference (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.TableReference]): Required. Reference to the table to read. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_storage_v1beta1.types.TableReference` - parent (str): Required. String of the form ``projects/{project_id}`` indicating the - project this ReadSession is associated with. This is the project that - will be billed for usage. - table_modifiers (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.TableModifiers]): Any modifiers to the Table (e.g. snapshot timestamp). - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_storage_v1beta1.types.TableModifiers` - requested_streams (int): Initial number of streams. If unset or 0, we will - provide a value of streams so as to produce reasonable throughput. Must be - non-negative. The number of streams may be lower than the requested number, - depending on the amount parallelism that is reasonable for the table and - the maximum amount of parallelism allowed by the system. - - Streams must be read starting from offset 0. - read_options (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.TableReadOptions]): Read options for this session (e.g. column selection, filters). - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_storage_v1beta1.types.TableReadOptions` - format_ (~google.cloud.bigquery_storage_v1beta1.types.DataFormat): Data output format. Currently default to Avro. - sharding_strategy (~google.cloud.bigquery_storage_v1beta1.types.ShardingStrategy): The strategy to use for distributing data among multiple streams. Currently - defaults to liquid sharding. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_storage_v1beta1.types.ReadSession` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_read_session" not in self._inner_api_calls: - self._inner_api_calls[ - "create_read_session" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_read_session, - default_retry=self._method_configs["CreateReadSession"].retry, - default_timeout=self._method_configs["CreateReadSession"].timeout, - client_info=self._client_info, - ) - - request = storage_pb2.CreateReadSessionRequest( - table_reference=table_reference, - parent=parent, - table_modifiers=table_modifiers, - requested_streams=requested_streams, - read_options=read_options, - format=format_, - sharding_strategy=sharding_strategy, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [ - ("table_reference.project_id", table_reference.project_id), - ("table_reference.dataset_id", table_reference.dataset_id), - ] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) # pragma: no cover - - return self._inner_api_calls["create_read_session"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def read_rows( - self, - read_position, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Reads rows from the table in the format prescribed by the read session. - Each response contains one or more table rows, up to a maximum of 10 MiB - per response; read requests which attempt to read individual rows larger - than this will fail. - - Each request also returns a set of stream statistics reflecting the - estimated total number of rows in the read stream. This number is computed - based on the total table size and the number of active streams in the read - session, and may change as other streams continue to read data. - - Example: - >>> from google.cloud import bigquery_storage_v1beta1 - >>> - >>> client = bigquery_storage_v1beta1.BigQueryStorageClient() - >>> - >>> # TODO: Initialize `read_position`: - >>> read_position = {} - >>> - >>> for element in client.read_rows(read_position): - ... # process element - ... pass - - Args: - read_position (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.StreamPosition]): Required. Identifier of the position in the stream to start reading from. - The offset requested must be less than the last row read from ReadRows. - Requesting a larger offset is undefined. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_storage_v1beta1.types.StreamPosition` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - Iterable[~google.cloud.bigquery_storage_v1beta1.types.ReadRowsResponse]. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "read_rows" not in self._inner_api_calls: - self._inner_api_calls[ - "read_rows" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.read_rows, - default_retry=self._method_configs["ReadRows"].retry, - default_timeout=self._method_configs["ReadRows"].timeout, - client_info=self._client_info, - ) - - request = storage_pb2.ReadRowsRequest(read_position=read_position) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("read_position.stream.name", read_position.stream.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) # pragma: no cover - - return self._inner_api_calls["read_rows"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def batch_create_read_session_streams( - self, - session, - requested_streams, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates additional streams for a ReadSession. This API can be used to - dynamically adjust the parallelism of a batch processing task upwards by - adding additional workers. - - Example: - >>> from google.cloud import bigquery_storage_v1beta1 - >>> - >>> client = bigquery_storage_v1beta1.BigQueryStorageClient() - >>> - >>> # TODO: Initialize `session`: - >>> session = {} - >>> - >>> # TODO: Initialize `requested_streams`: - >>> requested_streams = 0 - >>> - >>> response = client.batch_create_read_session_streams(session, requested_streams) - - Args: - session (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.ReadSession]): Required. Must be a non-expired session obtained from a call to - CreateReadSession. Only the name field needs to be set. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_storage_v1beta1.types.ReadSession` - requested_streams (int): Required. Number of new streams requested. Must be positive. - Number of added streams may be less than this, see CreateReadSessionRequest - for more information. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_storage_v1beta1.types.BatchCreateReadSessionStreamsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "batch_create_read_session_streams" not in self._inner_api_calls: - self._inner_api_calls[ - "batch_create_read_session_streams" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.batch_create_read_session_streams, - default_retry=self._method_configs[ - "BatchCreateReadSessionStreams" - ].retry, - default_timeout=self._method_configs[ - "BatchCreateReadSessionStreams" - ].timeout, - client_info=self._client_info, - ) - - request = storage_pb2.BatchCreateReadSessionStreamsRequest( - session=session, requested_streams=requested_streams - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("session.name", session.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) # pragma: no cover - - return self._inner_api_calls["batch_create_read_session_streams"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def finalize_stream( - self, - stream, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Triggers the graceful termination of a single stream in a ReadSession. This - API can be used to dynamically adjust the parallelism of a batch processing - task downwards without losing data. - - This API does not delete the stream -- it remains visible in the - ReadSession, and any data processed by the stream is not released to other - streams. However, no additional data will be assigned to the stream once - this call completes. Callers must continue reading data on the stream until - the end of the stream is reached so that data which has already been - assigned to the stream will be processed. - - This method will return an error if there are no other live streams - in the Session, or if SplitReadStream() has been called on the given - Stream. - - Example: - >>> from google.cloud import bigquery_storage_v1beta1 - >>> - >>> client = bigquery_storage_v1beta1.BigQueryStorageClient() - >>> - >>> # TODO: Initialize `stream`: - >>> stream = {} - >>> - >>> client.finalize_stream(stream) - - Args: - stream (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.Stream]): Stream to finalize. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_storage_v1beta1.types.Stream` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "finalize_stream" not in self._inner_api_calls: - self._inner_api_calls[ - "finalize_stream" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.finalize_stream, - default_retry=self._method_configs["FinalizeStream"].retry, - default_timeout=self._method_configs["FinalizeStream"].timeout, - client_info=self._client_info, - ) - - request = storage_pb2.FinalizeStreamRequest(stream=stream) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("stream.name", stream.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) # pragma: no cover - - self._inner_api_calls["finalize_stream"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def split_read_stream( - self, - original_stream, - fraction=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Splits a given read stream into two Streams. These streams are referred - to as the primary and the residual of the split. The original stream can - still be read from in the same manner as before. Both of the returned - streams can also be read from, and the total rows return by both child - streams will be the same as the rows read from the original stream. - - Moreover, the two child streams will be allocated back to back in the - original Stream. Concretely, it is guaranteed that for streams Original, - Primary, and Residual, that Original[0-j] = Primary[0-j] and - Original[j-n] = Residual[0-m] once the streams have been read to - completion. - - This method is guaranteed to be idempotent. - - Example: - >>> from google.cloud import bigquery_storage_v1beta1 - >>> - >>> client = bigquery_storage_v1beta1.BigQueryStorageClient() - >>> - >>> # TODO: Initialize `original_stream`: - >>> original_stream = {} - >>> - >>> response = client.split_read_stream(original_stream) - - Args: - original_stream (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.Stream]): Stream to split. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_storage_v1beta1.types.Stream` - fraction (float): A value in the range (0.0, 1.0) that specifies the fractional point at - which the original stream should be split. The actual split point is - evaluated on pre-filtered rows, so if a filter is provided, then there is - no guarantee that the division of the rows between the new child streams - will be proportional to this fractional value. Additionally, because the - server-side unit for assigning data is collections of rows, this fraction - will always map to to a data storage boundary on the server side. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_storage_v1beta1.types.SplitReadStreamResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "split_read_stream" not in self._inner_api_calls: - self._inner_api_calls[ - "split_read_stream" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.split_read_stream, - default_retry=self._method_configs["SplitReadStream"].retry, - default_timeout=self._method_configs["SplitReadStream"].timeout, - client_info=self._client_info, - ) - - request = storage_pb2.SplitReadStreamRequest( - original_stream=original_stream, fraction=fraction - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("original_stream.name", original_stream.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) # pragma: no cover - - return self._inner_api_calls["split_read_stream"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client_config.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client_config.py deleted file mode 100644 index e33b166f703b..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client_config.py +++ /dev/null @@ -1,67 +0,0 @@ -config = { - "interfaces": { - "google.cloud.bigquery.storage.v1beta1.BigQueryStorage": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - "unary_streaming": ["UNAVAILABLE"], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - }, - "create_read_session": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 120000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 120000, - "total_timeout_millis": 600000, - }, - "read_rows": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 86400000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 86400000, - "total_timeout_millis": 86400000, - }, - }, - "methods": { - "CreateReadSession": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "create_read_session", - }, - "ReadRows": { - "timeout_millis": 86400000, - "retry_codes_name": "unary_streaming", - "retry_params_name": "read_rows", - }, - "BatchCreateReadSessionStreams": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "FinalizeStream": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "SplitReadStream": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/enums.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/enums.py deleted file mode 100644 index bb3b3b7601c9..000000000000 --- 
a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/enums.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class DataFormat(enum.IntEnum): - """ - Data format for input or output data. - - Attributes: - DATA_FORMAT_UNSPECIFIED (int): Data format is unspecified. - AVRO (int): Avro is a standard open source row based file format. - See https://avro.apache.org/ for more details. - ARROW (int) - """ - - DATA_FORMAT_UNSPECIFIED = 0 - AVRO = 1 - ARROW = 3 - - -class ShardingStrategy(enum.IntEnum): - """ - Strategy for distributing data among multiple streams in a read session. - - Attributes: - SHARDING_STRATEGY_UNSPECIFIED (int): Same as LIQUID. - LIQUID (int): Assigns data to each stream based on the client's read rate. The faster the - client reads from a stream, the more data is assigned to the stream. In - this strategy, it's possible to read all data from a single stream even if - there are other streams present. - BALANCED (int): Assigns data to each stream such that roughly the same number of rows can - be read from each stream. Because the server-side unit for assigning data - is collections of rows, the API does not guarantee that each stream will - return the same number or rows. Additionally, the limits are enforced based - on the number of pre-filtering rows, so some filters can lead to lopsided - assignments. - """ - - SHARDING_STRATEGY_UNSPECIFIED = 0 - LIQUID = 1 - BALANCED = 2 diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/__init__.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py deleted file mode 100644 index d799b3c21edf..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py +++ /dev/null @@ -1,226 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import google.api_core.grpc_helpers - -from google.cloud.bigquery_storage_v1beta1.proto import storage_pb2_grpc - - -class BigQueryStorageGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.bigquery.storage.v1beta1 BigQueryStorage API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", - "https://www.googleapis.com/auth/cloud-platform", - ) - - def __init__( - self, - channel=None, - credentials=None, - address="bigquerystorage.googleapis.com:443", - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: # pragma: no cover - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: # pragma: no cover - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "big_query_storage_stub": storage_pb2_grpc.BigQueryStorageStub(channel) - } - - @classmethod - def create_channel( - cls, address="bigquerystorage.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( # pragma: no cover - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_read_session(self): - """Return the gRPC stub for :meth:`BigQueryStorageClient.create_read_session`. - - Creates a new read session. A read session divides the contents of a - BigQuery table into one or more streams, which can then be used to read - data from the table. The read session also specifies properties of the - data to be read, such as a list of columns or a push-down filter describing - the rows to be returned. 
- - A particular row can be read by at most one stream. When the caller has - reached the end of each stream in the session, then all the data in the - table has been read. - - Read sessions automatically expire 24 hours after they are created and do - not require manual clean-up by the caller. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["big_query_storage_stub"].CreateReadSession - - @property - def read_rows(self): - """Return the gRPC stub for :meth:`BigQueryStorageClient.read_rows`. - - Reads rows from the table in the format prescribed by the read session. - Each response contains one or more table rows, up to a maximum of 10 MiB - per response; read requests which attempt to read individual rows larger - than this will fail. - - Each request also returns a set of stream statistics reflecting the - estimated total number of rows in the read stream. This number is computed - based on the total table size and the number of active streams in the read - session, and may change as other streams continue to read data. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["big_query_storage_stub"].ReadRows - - @property - def batch_create_read_session_streams(self): - """Return the gRPC stub for :meth:`BigQueryStorageClient.batch_create_read_session_streams`. - - Creates additional streams for a ReadSession. This API can be used to - dynamically adjust the parallelism of a batch processing task upwards by - adding additional workers. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["big_query_storage_stub"].BatchCreateReadSessionStreams - - @property - def finalize_stream(self): - """Return the gRPC stub for :meth:`BigQueryStorageClient.finalize_stream`. - - Triggers the graceful termination of a single stream in a ReadSession. This - API can be used to dynamically adjust the parallelism of a batch processing - task downwards without losing data. - - This API does not delete the stream -- it remains visible in the - ReadSession, and any data processed by the stream is not released to other - streams. However, no additional data will be assigned to the stream once - this call completes. Callers must continue reading data on the stream until - the end of the stream is reached so that data which has already been - assigned to the stream will be processed. - - This method will return an error if there are no other live streams - in the Session, or if SplitReadStream() has been called on the given - Stream. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["big_query_storage_stub"].FinalizeStream - - @property - def split_read_stream(self): - """Return the gRPC stub for :meth:`BigQueryStorageClient.split_read_stream`. - - Splits a given read stream into two Streams. These streams are referred - to as the primary and the residual of the split. The original stream can - still be read from in the same manner as before. Both of the returned - streams can also be read from, and the total rows return by both child - streams will be the same as the rows read from the original stream. 
- - Moreover, the two child streams will be allocated back to back in the - original Stream. Concretely, it is guaranteed that for streams Original, - Primary, and Residual, that Original[0-j] = Primary[0-j] and - Original[j-n] = Residual[0-m] once the streams have been read to - completion. - - This method is guaranteed to be idempotent. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["big_query_storage_stub"].SplitReadStream diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/__init__.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/arrow.proto b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/arrow.proto deleted file mode 100644 index 3003de444c2d..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/arrow.proto +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1beta1; - -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; -option java_outer_classname = "ArrowProto"; -option java_package = "com.google.cloud.bigquery.storage.v1beta1"; - -// Arrow schema. -message ArrowSchema { - // IPC serialized Arrow schema. - bytes serialized_schema = 1; -} - -// Arrow RecordBatch. -message ArrowRecordBatch { - // IPC serialized Arrow RecordBatch. - bytes serialized_record_batch = 1; - - // The count of rows in the returning block. - int64 row_count = 2; -} diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/arrow_pb2.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/arrow_pb2.py deleted file mode 100644 index 4d7d90afaf9e..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/arrow_pb2.py +++ /dev/null @@ -1,170 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/bigquery/storage_v1beta1/proto/arrow.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery/storage_v1beta1/proto/arrow.proto", - package="google.cloud.bigquery.storage.v1beta1", - syntax="proto3", - serialized_options=_b( - "\n)com.google.cloud.bigquery.storage.v1beta1B\nArrowProtoZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage" - ), - serialized_pb=_b( - '\n7google/cloud/bigquery/storage_v1beta1/proto/arrow.proto\x12%google.cloud.bigquery.storage.v1beta1"(\n\x0b\x41rrowSchema\x12\x19\n\x11serialized_schema\x18\x01 \x01(\x0c"F\n\x10\x41rrowRecordBatch\x12\x1f\n\x17serialized_record_batch\x18\x01 \x01(\x0c\x12\x11\n\trow_count\x18\x02 \x01(\x03\x42\x85\x01\n)com.google.cloud.bigquery.storage.v1beta1B\nArrowProtoZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storageb\x06proto3' - ), -) - - -_ARROWSCHEMA = _descriptor.Descriptor( - name="ArrowSchema", - full_name="google.cloud.bigquery.storage.v1beta1.ArrowSchema", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="serialized_schema", - full_name="google.cloud.bigquery.storage.v1beta1.ArrowSchema.serialized_schema", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b(""), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=98, - serialized_end=138, -) - - -_ARROWRECORDBATCH = _descriptor.Descriptor( - name="ArrowRecordBatch", - full_name="google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="serialized_record_batch", - full_name="google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.serialized_record_batch", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b(""), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="row_count", - full_name="google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.row_count", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=140, - serialized_end=210, -) - -DESCRIPTOR.message_types_by_name["ArrowSchema"] = _ARROWSCHEMA -DESCRIPTOR.message_types_by_name["ArrowRecordBatch"] = _ARROWRECORDBATCH 
-_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ArrowSchema = _reflection.GeneratedProtocolMessageType( - "ArrowSchema", - (_message.Message,), - dict( - DESCRIPTOR=_ARROWSCHEMA, - __module__="google.cloud.bigquery.storage_v1beta1.proto.arrow_pb2", - __doc__="""Arrow schema. - - - Attributes: - serialized_schema: - IPC serialized Arrow schema. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.ArrowSchema) - ), -) -_sym_db.RegisterMessage(ArrowSchema) - -ArrowRecordBatch = _reflection.GeneratedProtocolMessageType( - "ArrowRecordBatch", - (_message.Message,), - dict( - DESCRIPTOR=_ARROWRECORDBATCH, - __module__="google.cloud.bigquery.storage_v1beta1.proto.arrow_pb2", - __doc__="""Arrow RecordBatch. - - - Attributes: - serialized_record_batch: - IPC serialized Arrow RecordBatch. - row_count: - The count of rows in the returning block. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch) - ), -) -_sym_db.RegisterMessage(ArrowRecordBatch) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/arrow_pb2_grpc.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/arrow_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/arrow_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/avro.proto b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/avro.proto deleted file mode 100644 index 021d8e44f9f2..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/avro.proto +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1beta1; - -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; -option java_outer_classname = "AvroProto"; -option java_package = "com.google.cloud.bigquery.storage.v1beta1"; - -// Avro schema. -message AvroSchema { - // Json serialized schema, as described at - // https://avro.apache.org/docs/1.8.1/spec.html - string schema = 1; -} - -// Avro rows. -message AvroRows { - // Binary serialized rows in a block. - bytes serialized_binary_rows = 1; - - // The count of rows in the returning block. - int64 row_count = 2; -} diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/avro_pb2.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/avro_pb2.py deleted file mode 100644 index 1f5ee11d4022..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/avro_pb2.py +++ /dev/null @@ -1,171 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
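A similar sketch for the Avro payloads defined in the deleted avro.proto above, assuming fastavro is available: serialized_binary_rows is a concatenation of row_count schemaless-encoded records, and schema is the JSON Avro schema string. The helper name is illustrative, not part of the removed library.

import io
import json

import fastavro

def iter_avro_rows(avro_schema_msg, avro_rows_msg):
    """Yield one dict per row from a single AvroRows block."""
    schema = fastavro.parse_schema(json.loads(avro_schema_msg.schema))
    buf = io.BytesIO(avro_rows_msg.serialized_binary_rows)
    for _ in range(avro_rows_msg.row_count):
        yield fastavro.schemaless_reader(buf, schema)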
-# source: google/cloud/bigquery/storage_v1beta1/proto/avro.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery/storage_v1beta1/proto/avro.proto", - package="google.cloud.bigquery.storage.v1beta1", - syntax="proto3", - serialized_options=_b( - "\n)com.google.cloud.bigquery.storage.v1beta1B\tAvroProtoZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage" - ), - serialized_pb=_b( - '\n6google/cloud/bigquery/storage_v1beta1/proto/avro.proto\x12%google.cloud.bigquery.storage.v1beta1"\x1c\n\nAvroSchema\x12\x0e\n\x06schema\x18\x01 \x01(\t"=\n\x08\x41vroRows\x12\x1e\n\x16serialized_binary_rows\x18\x01 \x01(\x0c\x12\x11\n\trow_count\x18\x02 \x01(\x03\x42\x84\x01\n)com.google.cloud.bigquery.storage.v1beta1B\tAvroProtoZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storageb\x06proto3' - ), -) - - -_AVROSCHEMA = _descriptor.Descriptor( - name="AvroSchema", - full_name="google.cloud.bigquery.storage.v1beta1.AvroSchema", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="schema", - full_name="google.cloud.bigquery.storage.v1beta1.AvroSchema.schema", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=97, - serialized_end=125, -) - - -_AVROROWS = _descriptor.Descriptor( - name="AvroRows", - full_name="google.cloud.bigquery.storage.v1beta1.AvroRows", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="serialized_binary_rows", - full_name="google.cloud.bigquery.storage.v1beta1.AvroRows.serialized_binary_rows", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b(""), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="row_count", - full_name="google.cloud.bigquery.storage.v1beta1.AvroRows.row_count", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=127, - serialized_end=188, -) - -DESCRIPTOR.message_types_by_name["AvroSchema"] = _AVROSCHEMA -DESCRIPTOR.message_types_by_name["AvroRows"] = _AVROROWS -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -AvroSchema = _reflection.GeneratedProtocolMessageType( - "AvroSchema", - 
(_message.Message,), - dict( - DESCRIPTOR=_AVROSCHEMA, - __module__="google.cloud.bigquery.storage_v1beta1.proto.avro_pb2", - __doc__="""Avro schema. - - - Attributes: - schema: - Json serialized schema, as described at - https://avro.apache.org/docs/1.8.1/spec.html - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.AvroSchema) - ), -) -_sym_db.RegisterMessage(AvroSchema) - -AvroRows = _reflection.GeneratedProtocolMessageType( - "AvroRows", - (_message.Message,), - dict( - DESCRIPTOR=_AVROROWS, - __module__="google.cloud.bigquery.storage_v1beta1.proto.avro_pb2", - __doc__="""Avro rows. - - - Attributes: - serialized_binary_rows: - Binary serialized rows in a block. - row_count: - The count of rows in the returning block. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.AvroRows) - ), -) -_sym_db.RegisterMessage(AvroRows) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/avro_pb2_grpc.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/avro_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/avro_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/read_options.proto b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/read_options.proto deleted file mode 100644 index 9591deba7f47..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/read_options.proto +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1beta1; - -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; -option java_package = "com.google.cloud.bigquery.storage.v1beta1"; - -// Options dictating how we read a table. -message TableReadOptions { - // Optional. Names of the fields in the table that should be read. If empty, - // all fields will be read. If the specified field is a nested field, all the - // sub-fields in the field will be selected. The output field order is - // unrelated to the order of fields in selected_fields. - repeated string selected_fields = 1; - - // Optional. SQL text filtering statement, similar to a WHERE clause in - // a query. Currently, only a single predicate that is a comparison between - // a column and a constant value is supported. Aggregates are not supported. 
- // - // Examples: "int_field > 5" - // "date_field = CAST('2014-9-27' as DATE)" - // "nullable_field is not NULL" - // "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" - // "numeric_field BETWEEN 1.0 AND 5.0" - string row_restriction = 2; -} diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/read_options_pb2.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/read_options_pb2.py deleted file mode 100644 index 7291232de3c5..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/read_options_pb2.py +++ /dev/null @@ -1,123 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/bigquery/storage_v1beta1/proto/read_options.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery/storage_v1beta1/proto/read_options.proto", - package="google.cloud.bigquery.storage.v1beta1", - syntax="proto3", - serialized_options=_b( - "\n)com.google.cloud.bigquery.storage.v1beta1ZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage" - ), - serialized_pb=_b( - '\n>google/cloud/bigquery/storage_v1beta1/proto/read_options.proto\x12%google.cloud.bigquery.storage.v1beta1"D\n\x10TableReadOptions\x12\x17\n\x0fselected_fields\x18\x01 \x03(\t\x12\x17\n\x0frow_restriction\x18\x02 \x01(\tBy\n)com.google.cloud.bigquery.storage.v1beta1ZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storageb\x06proto3' - ), -) - - -_TABLEREADOPTIONS = _descriptor.Descriptor( - name="TableReadOptions", - full_name="google.cloud.bigquery.storage.v1beta1.TableReadOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="selected_fields", - full_name="google.cloud.bigquery.storage.v1beta1.TableReadOptions.selected_fields", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="row_restriction", - full_name="google.cloud.bigquery.storage.v1beta1.TableReadOptions.row_restriction", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=105, - serialized_end=173, -) - -DESCRIPTOR.message_types_by_name["TableReadOptions"] = _TABLEREADOPTIONS -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -TableReadOptions = _reflection.GeneratedProtocolMessageType( - "TableReadOptions", - (_message.Message,), - dict( - DESCRIPTOR=_TABLEREADOPTIONS, - __module__="google.cloud.bigquery.storage_v1beta1.proto.read_options_pb2", - __doc__="""Options dictating how we read a 
table. - - - Attributes: - selected_fields: - Optional. Names of the fields in the table that should be - read. If empty, all fields will be read. If the specified - field is a nested field, all the sub-fields in the field will - be selected. The output field order is unrelated to the order - of fields in selected\_fields. - row_restriction: - Optional. SQL text filtering statement, similar to a WHERE - clause in a query. Currently, only a single predicate that is - a comparison between a column and a constant value is - supported. Aggregates are not supported. Examples: - "int\_field > 5" "date\_field = CAST('2014-9-27' as DATE)" - "nullable\_field is not NULL" "st\_equals(geo\_field, - st\_geofromtext("POINT(2, 2)"))" "numeric\_field BETWEEN 1.0 - AND 5.0" - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.TableReadOptions) - ), -) -_sym_db.RegisterMessage(TableReadOptions) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/read_options_pb2_grpc.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/read_options_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/read_options_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage.proto b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage.proto deleted file mode 100644 index 22f742fbb654..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage.proto +++ /dev/null @@ -1,405 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1beta1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/storage/v1beta1/arrow.proto"; -import "google/cloud/bigquery/storage/v1beta1/avro.proto"; -import "google/cloud/bigquery/storage/v1beta1/read_options.proto"; -import "google/cloud/bigquery/storage/v1beta1/table_reference.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; -option java_package = "com.google.cloud.bigquery.storage.v1beta1"; - -// BigQuery storage API. -// -// The BigQuery storage API can be used to read data stored in BigQuery. 
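The TableReadOptions message removed above is what CreateReadSession (defined next in storage.proto) accepts for column projection and push-down filtering. A hedged example of constructing it with the removed generated types; the import path follows the deleted package layout and should be treated as an assumption.

from google.cloud.bigquery_storage_v1beta1 import types

read_options = types.TableReadOptions(
    # An empty selected_fields list means "read all columns".
    selected_fields=["int_field", "date_field"],
    # A single comparison predicate, as in the row_restriction examples above.
    row_restriction="int_field > 5",
)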
-service BigQueryStorage { - option (google.api.default_host) = "bigquerystorage.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/bigquery," - "https://www.googleapis.com/auth/bigquery.readonly," - "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a new read session. A read session divides the contents of a - // BigQuery table into one or more streams, which can then be used to read - // data from the table. The read session also specifies properties of the - // data to be read, such as a list of columns or a push-down filter describing - // the rows to be returned. - // - // A particular row can be read by at most one stream. When the caller has - // reached the end of each stream in the session, then all the data in the - // table has been read. - // - // Read sessions automatically expire 24 hours after they are created and do - // not require manual clean-up by the caller. - rpc CreateReadSession(CreateReadSessionRequest) returns (ReadSession) { - option (google.api.http) = { - post: "/v1beta1/{table_reference.project_id=projects/*}" - body: "*" - additional_bindings { - post: "/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}" - body: "*" - } - }; - option (google.api.method_signature) = "table_reference,parent,requested_streams"; - } - - // Reads rows from the table in the format prescribed by the read session. - // Each response contains one or more table rows, up to a maximum of 10 MiB - // per response; read requests which attempt to read individual rows larger - // than this will fail. - // - // Each request also returns a set of stream statistics reflecting the - // estimated total number of rows in the read stream. This number is computed - // based on the total table size and the number of active streams in the read - // session, and may change as other streams continue to read data. - rpc ReadRows(ReadRowsRequest) returns (stream ReadRowsResponse) { - option (google.api.http) = { - get: "/v1beta1/{read_position.stream.name=projects/*/streams/*}" - }; - option (google.api.method_signature) = "read_position"; - } - - // Creates additional streams for a ReadSession. This API can be used to - // dynamically adjust the parallelism of a batch processing task upwards by - // adding additional workers. - rpc BatchCreateReadSessionStreams(BatchCreateReadSessionStreamsRequest) returns (BatchCreateReadSessionStreamsResponse) { - option (google.api.http) = { - post: "/v1beta1/{session.name=projects/*/sessions/*}" - body: "*" - }; - option (google.api.method_signature) = "session,requested_streams"; - } - - // Triggers the graceful termination of a single stream in a ReadSession. This - // API can be used to dynamically adjust the parallelism of a batch processing - // task downwards without losing data. - // - // This API does not delete the stream -- it remains visible in the - // ReadSession, and any data processed by the stream is not released to other - // streams. However, no additional data will be assigned to the stream once - // this call completes. Callers must continue reading data on the stream until - // the end of the stream is reached so that data which has already been - // assigned to the stream will be processed. - // - // This method will return an error if there are no other live streams - // in the Session, or if SplitReadStream() has been called on the given - // Stream. 
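BatchCreateReadSessionStreams and FinalizeStream described above are the scale-up and scale-down halves of dynamic parallelism. A hypothetical sketch against the removed v1beta1 client; the method names follow the deleted big_query_storage_client.py, but exact signatures are assumptions here.

def rebalance(client, session):
    """Add two streams to a live session, then retire one gracefully."""
    # Scale up: request additional streams on a non-expired session.
    response = client.batch_create_read_session_streams(session, requested_streams=2)
    # Scale down: no new data is assigned to a finalized stream, but callers
    # must still drain it to the end to receive rows already assigned to it.
    client.finalize_stream(response.streams[0])
    return response.streams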
- rpc FinalizeStream(FinalizeStreamRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v1beta1/{stream.name=projects/*/streams/*}" - body: "*" - }; - option (google.api.method_signature) = "stream"; - } - - // Splits a given read stream into two Streams. These streams are referred to - // as the primary and the residual of the split. The original stream can still - // be read from in the same manner as before. Both of the returned streams can - // also be read from, and the total rows return by both child streams will be - // the same as the rows read from the original stream. - // - // Moreover, the two child streams will be allocated back to back in the - // original Stream. Concretely, it is guaranteed that for streams Original, - // Primary, and Residual, that Original[0-j] = Primary[0-j] and - // Original[j-n] = Residual[0-m] once the streams have been read to - // completion. - // - // This method is guaranteed to be idempotent. - rpc SplitReadStream(SplitReadStreamRequest) returns (SplitReadStreamResponse) { - option (google.api.http) = { - get: "/v1beta1/{original_stream.name=projects/*/streams/*}" - }; - option (google.api.method_signature) = "original_stream"; - } -} - -// Information about a single data stream within a read session. -message Stream { - option (google.api.resource) = { - type: "bigquerystorage.googleapis.com/Stream" - pattern: "projects/{project}/locations/{location}/streams/{stream}" - }; - - // Name of the stream, in the form - // `projects/{project_id}/locations/{location}/streams/{stream_id}`. - string name = 1; -} - -// Expresses a point within a given stream using an offset position. -message StreamPosition { - // Identifier for a given Stream. - Stream stream = 1; - - // Position in the stream. - int64 offset = 2; -} - -// Information returned from a `CreateReadSession` request. -message ReadSession { - option (google.api.resource) = { - type: "bigquerystorage.googleapis.com/ReadSession" - pattern: "projects/{project}/locations/{location}/sessions/{session}" - }; - - // Unique identifier for the session, in the form - // `projects/{project_id}/locations/{location}/sessions/{session_id}`. - string name = 1; - - // Time at which the session becomes invalid. After this time, subsequent - // requests to read this Session will return errors. - google.protobuf.Timestamp expire_time = 2; - - // The schema for the read. If read_options.selected_fields is set, the - // schema may be different from the table schema as it will only contain - // the selected fields. - oneof schema { - // Avro schema. - AvroSchema avro_schema = 5; - - // Arrow schema. - ArrowSchema arrow_schema = 6; - } - - // Streams associated with this session. - repeated Stream streams = 4; - - // Table that this ReadSession is reading from. - TableReference table_reference = 7; - - // Any modifiers which are applied when reading from the specified table. - TableModifiers table_modifiers = 8; - - // The strategy to use for distributing data among the streams. - ShardingStrategy sharding_strategy = 9; -} - -// Creates a new read session, which may include additional options such as -// requested parallelism, projection filters and constraints. -message CreateReadSessionRequest { - // Required. Reference to the table to read. - TableReference table_reference = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. String of the form `projects/{project_id}` indicating the - // project this ReadSession is associated with. 
This is the project that will - // be billed for usage. - string parent = 6 [(google.api.field_behavior) = REQUIRED]; - - // Any modifiers to the Table (e.g. snapshot timestamp). - TableModifiers table_modifiers = 2; - - // Initial number of streams. If unset or 0, we will - // provide a value of streams so as to produce reasonable throughput. Must be - // non-negative. The number of streams may be lower than the requested number, - // depending on the amount parallelism that is reasonable for the table and - // the maximum amount of parallelism allowed by the system. - // - // Streams must be read starting from offset 0. - int32 requested_streams = 3; - - // Read options for this session (e.g. column selection, filters). - TableReadOptions read_options = 4; - - // Data output format. Currently default to Avro. - DataFormat format = 5; - - // The strategy to use for distributing data among multiple streams. Currently - // defaults to liquid sharding. - ShardingStrategy sharding_strategy = 7; -} - -// Data format for input or output data. -enum DataFormat { - // Data format is unspecified. - DATA_FORMAT_UNSPECIFIED = 0; - - // Avro is a standard open source row based file format. - // See https://avro.apache.org/ for more details. - AVRO = 1; - - ARROW = 3; -} - -// Strategy for distributing data among multiple streams in a read session. -enum ShardingStrategy { - // Same as LIQUID. - SHARDING_STRATEGY_UNSPECIFIED = 0; - - // Assigns data to each stream based on the client's read rate. The faster the - // client reads from a stream, the more data is assigned to the stream. In - // this strategy, it's possible to read all data from a single stream even if - // there are other streams present. - LIQUID = 1; - - // Assigns data to each stream such that roughly the same number of rows can - // be read from each stream. Because the server-side unit for assigning data - // is collections of rows, the API does not guarantee that each stream will - // return the same number or rows. Additionally, the limits are enforced based - // on the number of pre-filtering rows, so some filters can lead to lopsided - // assignments. - BALANCED = 2; -} - -// Requesting row data via `ReadRows` must provide Stream position information. -message ReadRowsRequest { - // Required. Identifier of the position in the stream to start reading from. - // The offset requested must be less than the last row read from ReadRows. - // Requesting a larger offset is undefined. - StreamPosition read_position = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// Progress information for a given Stream. -message StreamStatus { - // Number of estimated rows in the current stream. May change over time as - // different readers in the stream progress at rates which are relatively fast - // or slow. - int64 estimated_row_count = 1; - - // A value in the range [0.0, 1.0] that represents the fraction of rows - // assigned to this stream that have been processed by the server. In the - // presence of read filters, the server may process more rows than it returns, - // so this value reflects progress through the pre-filtering rows. - // - // This value is only populated for sessions created through the BALANCED - // sharding strategy. - float fraction_consumed = 2; - - // Represents the progress of the current stream. - // - // Note: This value is under development and should not be used. Use - // `fraction_consumed` instead. - Progress progress = 4; - - // Whether this stream can be split. 
For sessions that use the LIQUID sharding - // strategy, this value is always false. For BALANCED sessions, this value is - // false when enough data have been read such that no more splits are possible - // at that point or beyond. For small tables or streams that are the result of - // a chain of splits, this value may never be true. - bool is_splittable = 3; -} - -message Progress { - // The fraction of rows assigned to the stream that have been processed by the - // server so far, not including the rows in the current response message. - // - // This value, along with `at_response_end`, can be used to interpolate the - // progress made as the rows in the message are being processed using the - // following formula: `at_response_start + (at_response_end - - // at_response_start) * rows_processed_from_response / rows_in_response`. - // - // Note that if a filter is provided, the `at_response_end` value of the - // previous response may not necessarily be equal to the `at_response_start` - // value of the current response. - float at_response_start = 1; - - // Similar to `at_response_start`, except that this value includes the rows in - // the current response. - float at_response_end = 2; -} - -// Information on if the current connection is being throttled. -message ThrottleStatus { - // How much this connection is being throttled. - // 0 is no throttling, 100 is completely throttled. - int32 throttle_percent = 1; -} - -// Response from calling `ReadRows` may include row data, progress and -// throttling information. -message ReadRowsResponse { - // Row data is returned in format specified during session creation. - oneof rows { - // Serialized row data in AVRO format. - AvroRows avro_rows = 3; - - // Serialized row data in Arrow RecordBatch format. - ArrowRecordBatch arrow_record_batch = 4; - } - - // Number of serialized rows in the rows block. This value is recorded here, - // in addition to the row_count values in the output-specific messages in - // `rows`, so that code which needs to record progress through the stream can - // do so in an output format-independent way. - int64 row_count = 6; - - // Estimated stream statistics. - StreamStatus status = 2; - - // Throttling status. If unset, the latest response still describes - // the current throttling status. - ThrottleStatus throttle_status = 5; -} - -// Information needed to request additional streams for an established read -// session. -message BatchCreateReadSessionStreamsRequest { - // Required. Must be a non-expired session obtained from a call to - // CreateReadSession. Only the name field needs to be set. - ReadSession session = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. Number of new streams requested. Must be positive. - // Number of added streams may be less than this, see CreateReadSessionRequest - // for more information. - int32 requested_streams = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// The response from `BatchCreateReadSessionStreams` returns the stream -// identifiers for the newly created streams. -message BatchCreateReadSessionStreamsResponse { - // Newly added streams. - repeated Stream streams = 1; -} - -// Request information for invoking `FinalizeStream`. -message FinalizeStreamRequest { - // Stream to finalize. - Stream stream = 2; -} - -// Request information for `SplitReadStream`. -message SplitReadStreamRequest { - // Stream to split. 
- Stream original_stream = 1; - - // A value in the range (0.0, 1.0) that specifies the fractional point at - // which the original stream should be split. The actual split point is - // evaluated on pre-filtered rows, so if a filter is provided, then there is - // no guarantee that the division of the rows between the new child streams - // will be proportional to this fractional value. Additionally, because the - // server-side unit for assigning data is collections of rows, this fraction - // will always map to to a data storage boundary on the server side. - float fraction = 2; -} - -// Response from `SplitReadStream`. -message SplitReadStreamResponse { - // Primary stream, which contains the beginning portion of - // |original_stream|. An empty value indicates that the original stream can no - // longer be split. - Stream primary_stream = 1; - - // Remainder stream, which contains the tail of |original_stream|. An empty - // value indicates that the original stream can no longer be split. - Stream remainder_stream = 2; -} diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py deleted file mode 100644 index 500d277c6cf5..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py +++ /dev/null @@ -1,1716 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/bigquery/storage_v1beta1/proto/storage.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.bigquery_storage_v1beta1.proto import ( - arrow_pb2 as google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_arrow__pb2, -) -from google.cloud.bigquery_storage_v1beta1.proto import ( - avro_pb2 as google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_avro__pb2, -) -from google.cloud.bigquery_storage_v1beta1.proto import ( - read_options_pb2 as google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_read__options__pb2, -) -from google.cloud.bigquery_storage_v1beta1.proto import ( - table_reference_pb2 as google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_table__reference__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery/storage_v1beta1/proto/storage.proto", - package="google.cloud.bigquery.storage.v1beta1", - syntax="proto3", - serialized_options=_b( - "\n)com.google.cloud.bigquery.storage.v1beta1ZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage" - ), - serialized_pb=_b( - 
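Putting the deleted storage.proto surface together, the usual read path was: create a read session for a table, then call ReadRows on each returned stream. A hedged end-to-end sketch modeled on the removed v1beta1 client and samples; project, dataset, and table names are placeholders, and the exact signatures are assumptions.

from google.cloud import bigquery_storage_v1beta1

client = bigquery_storage_v1beta1.BigQueryStorageClient()

table_ref = bigquery_storage_v1beta1.types.TableReference(
    project_id="my-project", dataset_id="my_dataset", table_id="my_table"
)
session = client.create_read_session(
    table_ref,
    parent="projects/my-billing-project",  # project billed for the read
    requested_streams=1,
    format_=bigquery_storage_v1beta1.enums.DataFormat.AVRO,
)

# Read every row from the first (and only) stream of the session.
position = bigquery_storage_v1beta1.types.StreamPosition(stream=session.streams[0])
reader = client.read_rows(position)
for row in reader.rows(session):
    print(row)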
'\n9google/cloud/bigquery/storage_v1beta1/proto/storage.proto\x12%google.cloud.bigquery.storage.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x37google/cloud/bigquery/storage_v1beta1/proto/arrow.proto\x1a\x36google/cloud/bigquery/storage_v1beta1/proto/avro.proto\x1a>google/cloud/bigquery/storage_v1beta1/proto/read_options.proto\x1a\x41google/cloud/bigquery/storage_v1beta1/proto/table_reference.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto"|\n\x06Stream\x12\x0c\n\x04name\x18\x01 \x01(\t:d\xea\x41\x61\n%bigquerystorage.googleapis.com/Stream\x12\x38projects/{project}/locations/{location}/streams/{stream}"_\n\x0eStreamPosition\x12=\n\x06stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12\x0e\n\x06offset\x18\x02 \x01(\x03"\x8d\x05\n\x0bReadSession\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12H\n\x0b\x61vro_schema\x18\x05 \x01(\x0b\x32\x31.google.cloud.bigquery.storage.v1beta1.AvroSchemaH\x00\x12J\n\x0c\x61rrow_schema\x18\x06 \x01(\x0b\x32\x32.google.cloud.bigquery.storage.v1beta1.ArrowSchemaH\x00\x12>\n\x07streams\x18\x04 \x03(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12N\n\x0ftable_reference\x18\x07 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableReference\x12N\n\x0ftable_modifiers\x18\x08 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableModifiers\x12R\n\x11sharding_strategy\x18\t \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1beta1.ShardingStrategy:k\xea\x41h\n*bigquerystorage.googleapis.com/ReadSession\x12:projects/{project}/locations/{location}/sessions/{session}B\x08\n\x06schema"\xd5\x03\n\x18\x43reateReadSessionRequest\x12S\n\x0ftable_reference\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableReferenceB\x03\xe0\x41\x02\x12\x13\n\x06parent\x18\x06 \x01(\tB\x03\xe0\x41\x02\x12N\n\x0ftable_modifiers\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableModifiers\x12\x19\n\x11requested_streams\x18\x03 \x01(\x05\x12M\n\x0cread_options\x18\x04 \x01(\x0b\x32\x37.google.cloud.bigquery.storage.v1beta1.TableReadOptions\x12\x41\n\x06\x66ormat\x18\x05 \x01(\x0e\x32\x31.google.cloud.bigquery.storage.v1beta1.DataFormat\x12R\n\x11sharding_strategy\x18\x07 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1beta1.ShardingStrategy"d\n\x0fReadRowsRequest\x12Q\n\rread_position\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.StreamPositionB\x03\xe0\x41\x02"\xa0\x01\n\x0cStreamStatus\x12\x1b\n\x13\x65stimated_row_count\x18\x01 \x01(\x03\x12\x19\n\x11\x66raction_consumed\x18\x02 \x01(\x02\x12\x41\n\x08progress\x18\x04 \x01(\x0b\x32/.google.cloud.bigquery.storage.v1beta1.Progress\x12\x15\n\ris_splittable\x18\x03 \x01(\x08">\n\x08Progress\x12\x19\n\x11\x61t_response_start\x18\x01 \x01(\x02\x12\x17\n\x0f\x61t_response_end\x18\x02 \x01(\x02"*\n\x0eThrottleStatus\x12\x18\n\x10throttle_percent\x18\x01 \x01(\x05"\xdf\x02\n\x10ReadRowsResponse\x12\x44\n\tavro_rows\x18\x03 \x01(\x0b\x32/.google.cloud.bigquery.storage.v1beta1.AvroRowsH\x00\x12U\n\x12\x61rrow_record_batch\x18\x04 \x01(\x0b\x32\x37.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatchH\x00\x12\x11\n\trow_count\x18\x06 \x01(\x03\x12\x43\n\x06status\x18\x02 \x01(\x0b\x32\x33.google.cloud.bigquery.storage.v1beta1.StreamStatus\x12N\n\x0fthrottle_status\x18\x05 
\x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.ThrottleStatusB\x06\n\x04rows"\x90\x01\n$BatchCreateReadSessionStreamsRequest\x12H\n\x07session\x18\x01 \x01(\x0b\x32\x32.google.cloud.bigquery.storage.v1beta1.ReadSessionB\x03\xe0\x41\x02\x12\x1e\n\x11requested_streams\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02"g\n%BatchCreateReadSessionStreamsResponse\x12>\n\x07streams\x18\x01 \x03(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream"V\n\x15\x46inalizeStreamRequest\x12=\n\x06stream\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream"r\n\x16SplitReadStreamRequest\x12\x46\n\x0foriginal_stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12\x10\n\x08\x66raction\x18\x02 \x01(\x02"\xa9\x01\n\x17SplitReadStreamResponse\x12\x45\n\x0eprimary_stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12G\n\x10remainder_stream\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream*>\n\nDataFormat\x12\x1b\n\x17\x44\x41TA_FORMAT_UNSPECIFIED\x10\x00\x12\x08\n\x04\x41VRO\x10\x01\x12\t\n\x05\x41RROW\x10\x03*O\n\x10ShardingStrategy\x12!\n\x1dSHARDING_STRATEGY_UNSPECIFIED\x10\x00\x12\n\n\x06LIQUID\x10\x01\x12\x0c\n\x08\x42\x41LANCED\x10\x02\x32\xeb\n\n\x0f\x42igQueryStorage\x12\xb3\x02\n\x11\x43reateReadSession\x12?.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest\x1a\x32.google.cloud.bigquery.storage.v1beta1.ReadSession"\xa8\x01\x82\xd3\xe4\x93\x02w"0/v1beta1/{table_reference.project_id=projects/*}:\x01*Z@";/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}:\x01*\xda\x41(table_reference,parent,requested_streams\x12\xd0\x01\n\x08ReadRows\x12\x36.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest\x1a\x37.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse"Q\x82\xd3\xe4\x93\x02;\x12\x39/v1beta1/{read_position.stream.name=projects/*/streams/*}\xda\x41\rread_position0\x01\x12\x90\x02\n\x1d\x42\x61tchCreateReadSessionStreams\x12K.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest\x1aL.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse"T\x82\xd3\xe4\x93\x02\x32"-/v1beta1/{session.name=projects/*/sessions/*}:\x01*\xda\x41\x19session,requested_streams\x12\xa7\x01\n\x0e\x46inalizeStream\x12<.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest\x1a\x16.google.protobuf.Empty"?\x82\xd3\xe4\x93\x02\x30"+/v1beta1/{stream.name=projects/*/streams/*}:\x01*\xda\x41\x06stream\x12\xe0\x01\n\x0fSplitReadStream\x12=.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest\x1a>.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse"N\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta1/{original_stream.name=projects/*/streams/*}\xda\x41\x0foriginal_stream\x1a\xae\x01\xca\x41\x1e\x62igquerystorage.googleapis.com\xd2\x41\x89\x01https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platformBy\n)com.google.cloud.bigquery.storage.v1beta1ZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storageb\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_arrow__pb2.DESCRIPTOR, - google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_avro__pb2.DESCRIPTOR, - 
google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_read__options__pb2.DESCRIPTOR, - google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_table__reference__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - -_DATAFORMAT = _descriptor.EnumDescriptor( - name="DataFormat", - full_name="google.cloud.bigquery.storage.v1beta1.DataFormat", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="DATA_FORMAT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="AVRO", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ARROW", index=2, number=3, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3227, - serialized_end=3289, -) -_sym_db.RegisterEnumDescriptor(_DATAFORMAT) - -DataFormat = enum_type_wrapper.EnumTypeWrapper(_DATAFORMAT) -_SHARDINGSTRATEGY = _descriptor.EnumDescriptor( - name="ShardingStrategy", - full_name="google.cloud.bigquery.storage.v1beta1.ShardingStrategy", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="SHARDING_STRATEGY_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="LIQUID", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="BALANCED", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3291, - serialized_end=3370, -) -_sym_db.RegisterEnumDescriptor(_SHARDINGSTRATEGY) - -ShardingStrategy = enum_type_wrapper.EnumTypeWrapper(_SHARDINGSTRATEGY) -DATA_FORMAT_UNSPECIFIED = 0 -AVRO = 1 -ARROW = 3 -SHARDING_STRATEGY_UNSPECIFIED = 0 -LIQUID = 1 -BALANCED = 2 - - -_STREAM = _descriptor.Descriptor( - name="Stream", - full_name="google.cloud.bigquery.storage.v1beta1.Stream", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.storage.v1beta1.Stream.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b( - "\352Aa\n%bigquerystorage.googleapis.com/Stream\0228projects/{project}/locations/{location}/streams/{stream}" - ), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=521, - serialized_end=645, -) - - -_STREAMPOSITION = _descriptor.Descriptor( - name="StreamPosition", - full_name="google.cloud.bigquery.storage.v1beta1.StreamPosition", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="stream", - full_name="google.cloud.bigquery.storage.v1beta1.StreamPosition.stream", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="offset", - 
full_name="google.cloud.bigquery.storage.v1beta1.StreamPosition.offset", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=647, - serialized_end=742, -) - - -_READSESSION = _descriptor.Descriptor( - name="ReadSession", - full_name="google.cloud.bigquery.storage.v1beta1.ReadSession", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.storage.v1beta1.ReadSession.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="expire_time", - full_name="google.cloud.bigquery.storage.v1beta1.ReadSession.expire_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="avro_schema", - full_name="google.cloud.bigquery.storage.v1beta1.ReadSession.avro_schema", - index=2, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="arrow_schema", - full_name="google.cloud.bigquery.storage.v1beta1.ReadSession.arrow_schema", - index=3, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="streams", - full_name="google.cloud.bigquery.storage.v1beta1.ReadSession.streams", - index=4, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="table_reference", - full_name="google.cloud.bigquery.storage.v1beta1.ReadSession.table_reference", - index=5, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="table_modifiers", - full_name="google.cloud.bigquery.storage.v1beta1.ReadSession.table_modifiers", - index=6, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), 
- _descriptor.FieldDescriptor( - name="sharding_strategy", - full_name="google.cloud.bigquery.storage.v1beta1.ReadSession.sharding_strategy", - index=7, - number=9, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b( - "\352Ah\n*bigquerystorage.googleapis.com/ReadSession\022:projects/{project}/locations/{location}/sessions/{session}" - ), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="schema", - full_name="google.cloud.bigquery.storage.v1beta1.ReadSession.schema", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=745, - serialized_end=1398, -) - - -_CREATEREADSESSIONREQUEST = _descriptor.Descriptor( - name="CreateReadSessionRequest", - full_name="google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="table_reference", - full_name="google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.table_reference", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.parent", - index=1, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="table_modifiers", - full_name="google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.table_modifiers", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="requested_streams", - full_name="google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.requested_streams", - index=3, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="read_options", - full_name="google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.read_options", - index=4, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="format", - full_name="google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.format", - index=5, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="sharding_strategy", - full_name="google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.sharding_strategy", - index=6, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1401, - serialized_end=1870, -) - - -_READROWSREQUEST = _descriptor.Descriptor( - name="ReadRowsRequest", - full_name="google.cloud.bigquery.storage.v1beta1.ReadRowsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="read_position", - full_name="google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.read_position", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1872, - serialized_end=1972, -) - - -_STREAMSTATUS = _descriptor.Descriptor( - name="StreamStatus", - full_name="google.cloud.bigquery.storage.v1beta1.StreamStatus", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="estimated_row_count", - full_name="google.cloud.bigquery.storage.v1beta1.StreamStatus.estimated_row_count", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fraction_consumed", - full_name="google.cloud.bigquery.storage.v1beta1.StreamStatus.fraction_consumed", - index=1, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="progress", - full_name="google.cloud.bigquery.storage.v1beta1.StreamStatus.progress", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="is_splittable", - full_name="google.cloud.bigquery.storage.v1beta1.StreamStatus.is_splittable", - index=3, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=1975, - serialized_end=2135, -) - - -_PROGRESS = _descriptor.Descriptor( - name="Progress", - full_name="google.cloud.bigquery.storage.v1beta1.Progress", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="at_response_start", - full_name="google.cloud.bigquery.storage.v1beta1.Progress.at_response_start", - index=0, - number=1, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="at_response_end", - full_name="google.cloud.bigquery.storage.v1beta1.Progress.at_response_end", - index=1, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2137, - serialized_end=2199, -) - - -_THROTTLESTATUS = _descriptor.Descriptor( - name="ThrottleStatus", - full_name="google.cloud.bigquery.storage.v1beta1.ThrottleStatus", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="throttle_percent", - full_name="google.cloud.bigquery.storage.v1beta1.ThrottleStatus.throttle_percent", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2201, - serialized_end=2243, -) - - -_READROWSRESPONSE = _descriptor.Descriptor( - name="ReadRowsResponse", - full_name="google.cloud.bigquery.storage.v1beta1.ReadRowsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="avro_rows", - full_name="google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.avro_rows", - index=0, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="arrow_record_batch", - full_name="google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.arrow_record_batch", - index=1, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="row_count", - full_name="google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.row_count", - index=2, - number=6, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status", - full_name="google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.status", - index=3, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="throttle_status", - full_name="google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.throttle_status", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="rows", - full_name="google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.rows", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=2246, - serialized_end=2597, -) - - -_BATCHCREATEREADSESSIONSTREAMSREQUEST = _descriptor.Descriptor( - name="BatchCreateReadSessionStreamsRequest", - full_name="google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="session", - full_name="google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.session", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="requested_streams", - full_name="google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.requested_streams", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2600, - serialized_end=2744, -) - - -_BATCHCREATEREADSESSIONSTREAMSRESPONSE = _descriptor.Descriptor( - name="BatchCreateReadSessionStreamsResponse", - full_name="google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="streams", - full_name="google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.streams", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2746, - serialized_end=2849, -) - - -_FINALIZESTREAMREQUEST = _descriptor.Descriptor( - 
name="FinalizeStreamRequest", - full_name="google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="stream", - full_name="google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.stream", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2851, - serialized_end=2937, -) - - -_SPLITREADSTREAMREQUEST = _descriptor.Descriptor( - name="SplitReadStreamRequest", - full_name="google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="original_stream", - full_name="google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.original_stream", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fraction", - full_name="google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.fraction", - index=1, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2939, - serialized_end=3053, -) - - -_SPLITREADSTREAMRESPONSE = _descriptor.Descriptor( - name="SplitReadStreamResponse", - full_name="google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="primary_stream", - full_name="google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.primary_stream", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="remainder_stream", - full_name="google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.remainder_stream", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3056, - serialized_end=3225, -) - -_STREAMPOSITION.fields_by_name["stream"].message_type = _STREAM -_READSESSION.fields_by_name[ - "expire_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP 
-_READSESSION.fields_by_name[ - "avro_schema" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_avro__pb2._AVROSCHEMA -) -_READSESSION.fields_by_name[ - "arrow_schema" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_arrow__pb2._ARROWSCHEMA -) -_READSESSION.fields_by_name["streams"].message_type = _STREAM -_READSESSION.fields_by_name[ - "table_reference" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_table__reference__pb2._TABLEREFERENCE -) -_READSESSION.fields_by_name[ - "table_modifiers" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_table__reference__pb2._TABLEMODIFIERS -) -_READSESSION.fields_by_name["sharding_strategy"].enum_type = _SHARDINGSTRATEGY -_READSESSION.oneofs_by_name["schema"].fields.append( - _READSESSION.fields_by_name["avro_schema"] -) -_READSESSION.fields_by_name[ - "avro_schema" -].containing_oneof = _READSESSION.oneofs_by_name["schema"] -_READSESSION.oneofs_by_name["schema"].fields.append( - _READSESSION.fields_by_name["arrow_schema"] -) -_READSESSION.fields_by_name[ - "arrow_schema" -].containing_oneof = _READSESSION.oneofs_by_name["schema"] -_CREATEREADSESSIONREQUEST.fields_by_name[ - "table_reference" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_table__reference__pb2._TABLEREFERENCE -) -_CREATEREADSESSIONREQUEST.fields_by_name[ - "table_modifiers" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_table__reference__pb2._TABLEMODIFIERS -) -_CREATEREADSESSIONREQUEST.fields_by_name[ - "read_options" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_read__options__pb2._TABLEREADOPTIONS -) -_CREATEREADSESSIONREQUEST.fields_by_name["format"].enum_type = _DATAFORMAT -_CREATEREADSESSIONREQUEST.fields_by_name[ - "sharding_strategy" -].enum_type = _SHARDINGSTRATEGY -_READROWSREQUEST.fields_by_name["read_position"].message_type = _STREAMPOSITION -_STREAMSTATUS.fields_by_name["progress"].message_type = _PROGRESS -_READROWSRESPONSE.fields_by_name[ - "avro_rows" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_avro__pb2._AVROROWS -) -_READROWSRESPONSE.fields_by_name[ - "arrow_record_batch" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_arrow__pb2._ARROWRECORDBATCH -) -_READROWSRESPONSE.fields_by_name["status"].message_type = _STREAMSTATUS -_READROWSRESPONSE.fields_by_name["throttle_status"].message_type = _THROTTLESTATUS -_READROWSRESPONSE.oneofs_by_name["rows"].fields.append( - _READROWSRESPONSE.fields_by_name["avro_rows"] -) -_READROWSRESPONSE.fields_by_name[ - "avro_rows" -].containing_oneof = _READROWSRESPONSE.oneofs_by_name["rows"] -_READROWSRESPONSE.oneofs_by_name["rows"].fields.append( - _READROWSRESPONSE.fields_by_name["arrow_record_batch"] -) -_READROWSRESPONSE.fields_by_name[ - "arrow_record_batch" -].containing_oneof = _READROWSRESPONSE.oneofs_by_name["rows"] -_BATCHCREATEREADSESSIONSTREAMSREQUEST.fields_by_name[ - "session" -].message_type = _READSESSION -_BATCHCREATEREADSESSIONSTREAMSRESPONSE.fields_by_name["streams"].message_type = _STREAM -_FINALIZESTREAMREQUEST.fields_by_name["stream"].message_type = _STREAM -_SPLITREADSTREAMREQUEST.fields_by_name["original_stream"].message_type = _STREAM -_SPLITREADSTREAMRESPONSE.fields_by_name["primary_stream"].message_type = _STREAM 
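The remaining messages wired up here (BatchCreateReadSessionStreamsRequest, SplitReadStreamRequest, FinalizeStreamRequest) back the RPCs used to adjust parallelism on a live session. A sketch of how the v1beta1 GAPIC surface exposed them, continuing from the client and session in the previous snippet; the fraction and stream counts are illustrative:

    # Ask for two more streams on the existing session
    # (BatchCreateReadSessionStreams request/response pair).
    response = client.batch_create_read_session_streams(session, requested_streams=2)
    new_streams = list(response.streams)

    # Split one stream roughly in half: the primary keeps the head of the
    # data, the remainder gets the tail (SplitReadStream request/response).
    split = client.split_read_stream(session.streams[0], fraction=0.5)
    primary, remainder = split.primary_stream, split.remainder_stream

    # Stop assigning new data to a stream (FinalizeStream); rows already
    # assigned to it must still be read to completion.
    client.finalize_stream(remainder)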
-_SPLITREADSTREAMRESPONSE.fields_by_name["remainder_stream"].message_type = _STREAM -DESCRIPTOR.message_types_by_name["Stream"] = _STREAM -DESCRIPTOR.message_types_by_name["StreamPosition"] = _STREAMPOSITION -DESCRIPTOR.message_types_by_name["ReadSession"] = _READSESSION -DESCRIPTOR.message_types_by_name["CreateReadSessionRequest"] = _CREATEREADSESSIONREQUEST -DESCRIPTOR.message_types_by_name["ReadRowsRequest"] = _READROWSREQUEST -DESCRIPTOR.message_types_by_name["StreamStatus"] = _STREAMSTATUS -DESCRIPTOR.message_types_by_name["Progress"] = _PROGRESS -DESCRIPTOR.message_types_by_name["ThrottleStatus"] = _THROTTLESTATUS -DESCRIPTOR.message_types_by_name["ReadRowsResponse"] = _READROWSRESPONSE -DESCRIPTOR.message_types_by_name[ - "BatchCreateReadSessionStreamsRequest" -] = _BATCHCREATEREADSESSIONSTREAMSREQUEST -DESCRIPTOR.message_types_by_name[ - "BatchCreateReadSessionStreamsResponse" -] = _BATCHCREATEREADSESSIONSTREAMSRESPONSE -DESCRIPTOR.message_types_by_name["FinalizeStreamRequest"] = _FINALIZESTREAMREQUEST -DESCRIPTOR.message_types_by_name["SplitReadStreamRequest"] = _SPLITREADSTREAMREQUEST -DESCRIPTOR.message_types_by_name["SplitReadStreamResponse"] = _SPLITREADSTREAMRESPONSE -DESCRIPTOR.enum_types_by_name["DataFormat"] = _DATAFORMAT -DESCRIPTOR.enum_types_by_name["ShardingStrategy"] = _SHARDINGSTRATEGY -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Stream = _reflection.GeneratedProtocolMessageType( - "Stream", - (_message.Message,), - dict( - DESCRIPTOR=_STREAM, - __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", - __doc__="""Information about a single data stream within a read session. - - - Attributes: - name: - Name of the stream, in the form ``projects/{project_id}/locati - ons/{location}/streams/{stream_id}``. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.Stream) - ), -) -_sym_db.RegisterMessage(Stream) - -StreamPosition = _reflection.GeneratedProtocolMessageType( - "StreamPosition", - (_message.Message,), - dict( - DESCRIPTOR=_STREAMPOSITION, - __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", - __doc__="""Expresses a point within a given stream using an offset position. - - - Attributes: - stream: - Identifier for a given Stream. - offset: - Position in the stream. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.StreamPosition) - ), -) -_sym_db.RegisterMessage(StreamPosition) - -ReadSession = _reflection.GeneratedProtocolMessageType( - "ReadSession", - (_message.Message,), - dict( - DESCRIPTOR=_READSESSION, - __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", - __doc__="""Information returned from a ``CreateReadSession`` request. - - - Attributes: - name: - Unique identifier for the session, in the form ``projects/{pro - ject_id}/locations/{location}/sessions/{session_id}``. - expire_time: - Time at which the session becomes invalid. After this time, - subsequent requests to read this Session will return errors. - schema: - The schema for the read. If read\_options.selected\_fields is - set, the schema may be different from the table schema as it - will only contain the selected fields. - avro_schema: - Avro schema. - arrow_schema: - Arrow schema. - streams: - Streams associated with this session. - table_reference: - Table that this ReadSession is reading from. - table_modifiers: - Any modifiers which are applied when reading from the - specified table. - sharding_strategy: - The strategy to use for distributing data among the streams. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.ReadSession) - ), -) -_sym_db.RegisterMessage(ReadSession) - -CreateReadSessionRequest = _reflection.GeneratedProtocolMessageType( - "CreateReadSessionRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATEREADSESSIONREQUEST, - __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", - __doc__="""Creates a new read session, which may include additional options such as - requested parallelism, projection filters and constraints. - - - Attributes: - table_reference: - Required. Reference to the table to read. - parent: - Required. String of the form ``projects/{project_id}`` - indicating the project this ReadSession is associated with. - This is the project that will be billed for usage. - table_modifiers: - Any modifiers to the Table (e.g. snapshot timestamp). - requested_streams: - Initial number of streams. If unset or 0, we will provide a - value of streams so as to produce reasonable throughput. Must - be non-negative. The number of streams may be lower than the - requested number, depending on the amount parallelism that is - reasonable for the table and the maximum amount of parallelism - allowed by the system. Streams must be read starting from - offset 0. - read_options: - Read options for this session (e.g. column selection, - filters). - format: - Data output format. Currently default to Avro. - sharding_strategy: - The strategy to use for distributing data among multiple - streams. Currently defaults to liquid sharding. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest) - ), -) -_sym_db.RegisterMessage(CreateReadSessionRequest) - -ReadRowsRequest = _reflection.GeneratedProtocolMessageType( - "ReadRowsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_READROWSREQUEST, - __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", - __doc__="""Requesting row data via ``ReadRows`` must provide Stream position - information. - - - Attributes: - read_position: - Required. Identifier of the position in the stream to start - reading from. The offset requested must be less than the last - row read from ReadRows. Requesting a larger offset is - undefined. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.ReadRowsRequest) - ), -) -_sym_db.RegisterMessage(ReadRowsRequest) - -StreamStatus = _reflection.GeneratedProtocolMessageType( - "StreamStatus", - (_message.Message,), - dict( - DESCRIPTOR=_STREAMSTATUS, - __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", - __doc__="""Progress information for a given Stream. - - - Attributes: - estimated_row_count: - Number of estimated rows in the current stream. May change - over time as different readers in the stream progress at rates - which are relatively fast or slow. - fraction_consumed: - A value in the range [0.0, 1.0] that represents the fraction - of rows assigned to this stream that have been processed by - the server. In the presence of read filters, the server may - process more rows than it returns, so this value reflects - progress through the pre-filtering rows. This value is only - populated for sessions created through the BALANCED sharding - strategy. - progress: - Represents the progress of the current stream. Note: This - value is under development and should not be used. Use - ``fraction_consumed`` instead. - is_splittable: - Whether this stream can be split. 
For sessions that use the - LIQUID sharding strategy, this value is always false. For - BALANCED sessions, this value is false when enough data have - been read such that no more splits are possible at that point - or beyond. For small tables or streams that are the result of - a chain of splits, this value may never be true. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.StreamStatus) - ), -) -_sym_db.RegisterMessage(StreamStatus) - -Progress = _reflection.GeneratedProtocolMessageType( - "Progress", - (_message.Message,), - dict( - DESCRIPTOR=_PROGRESS, - __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", - __doc__="""Protocol buffer. - - Attributes: - at_response_start: - The fraction of rows assigned to the stream that have been - processed by the server so far, not including the rows in the - current response message. This value, along with - ``at_response_end``, can be used to interpolate the progress - made as the rows in the message are being processed using the - following formula: ``at_response_start + (at_response_end - - at_response_start) * rows_processed_from_response / - rows_in_response``. Note that if a filter is provided, the - ``at_response_end`` value of the previous response may not - necessarily be equal to the ``at_response_start`` value of the - current response. - at_response_end: - Similar to ``at_response_start``, except that this value - includes the rows in the current response. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.Progress) - ), -) -_sym_db.RegisterMessage(Progress) - -ThrottleStatus = _reflection.GeneratedProtocolMessageType( - "ThrottleStatus", - (_message.Message,), - dict( - DESCRIPTOR=_THROTTLESTATUS, - __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", - __doc__="""Information on if the current connection is being throttled. - - - Attributes: - throttle_percent: - How much this connection is being throttled. 0 is no - throttling, 100 is completely throttled. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.ThrottleStatus) - ), -) -_sym_db.RegisterMessage(ThrottleStatus) - -ReadRowsResponse = _reflection.GeneratedProtocolMessageType( - "ReadRowsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_READROWSRESPONSE, - __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", - __doc__="""Response from calling ``ReadRows`` may include row data, progress and - throttling information. - - - Attributes: - rows: - Row data is returned in format specified during session - creation. - avro_rows: - Serialized row data in AVRO format. - arrow_record_batch: - Serialized row data in Arrow RecordBatch format. - row_count: - Number of serialized rows in the rows block. This value is - recorded here, in addition to the row\_count values in the - output-specific messages in ``rows``, so that code which needs - to record progress through the stream can do so in an output - format-independent way. - status: - Estimated stream statistics. - throttle_status: - Throttling status. If unset, the latest response still - describes the current throttling status. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.ReadRowsResponse) - ), -) -_sym_db.RegisterMessage(ReadRowsResponse) - -BatchCreateReadSessionStreamsRequest = _reflection.GeneratedProtocolMessageType( - "BatchCreateReadSessionStreamsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_BATCHCREATEREADSESSIONSTREAMSREQUEST, - __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", - __doc__="""Information needed to request additional streams for an established read - session. - - - Attributes: - session: - Required. Must be a non-expired session obtained from a call - to CreateReadSession. Only the name field needs to be set. - requested_streams: - Required. Number of new streams requested. Must be positive. - Number of added streams may be less than this, see - CreateReadSessionRequest for more information. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest) - ), -) -_sym_db.RegisterMessage(BatchCreateReadSessionStreamsRequest) - -BatchCreateReadSessionStreamsResponse = _reflection.GeneratedProtocolMessageType( - "BatchCreateReadSessionStreamsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_BATCHCREATEREADSESSIONSTREAMSRESPONSE, - __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", - __doc__="""The response from ``BatchCreateReadSessionStreams`` returns the stream - identifiers for the newly created streams. - - - Attributes: - streams: - Newly added streams. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse) - ), -) -_sym_db.RegisterMessage(BatchCreateReadSessionStreamsResponse) - -FinalizeStreamRequest = _reflection.GeneratedProtocolMessageType( - "FinalizeStreamRequest", - (_message.Message,), - dict( - DESCRIPTOR=_FINALIZESTREAMREQUEST, - __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", - __doc__="""Request information for invoking ``FinalizeStream``. - - - Attributes: - stream: - Stream to finalize. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest) - ), -) -_sym_db.RegisterMessage(FinalizeStreamRequest) - -SplitReadStreamRequest = _reflection.GeneratedProtocolMessageType( - "SplitReadStreamRequest", - (_message.Message,), - dict( - DESCRIPTOR=_SPLITREADSTREAMREQUEST, - __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", - __doc__="""Request information for ``SplitReadStream``. - - - Attributes: - original_stream: - Stream to split. - fraction: - A value in the range (0.0, 1.0) that specifies the fractional - point at which the original stream should be split. The actual - split point is evaluated on pre-filtered rows, so if a filter - is provided, then there is no guarantee that the division of - the rows between the new child streams will be proportional to - this fractional value. Additionally, because the server-side - unit for assigning data is collections of rows, this fraction - will always map to to a data storage boundary on the server - side. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest) - ), -) -_sym_db.RegisterMessage(SplitReadStreamRequest) - -SplitReadStreamResponse = _reflection.GeneratedProtocolMessageType( - "SplitReadStreamResponse", - (_message.Message,), - dict( - DESCRIPTOR=_SPLITREADSTREAMRESPONSE, - __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", - __doc__="""Response from ``SplitReadStream``. - - - Attributes: - primary_stream: - Primary stream, which contains the beginning portion of - \|original\_stream\|. An empty value indicates that the - original stream can no longer be split. - remainder_stream: - Remainder stream, which contains the tail of - \|original\_stream\|. An empty value indicates that the - original stream can no longer be split. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse) - ), -) -_sym_db.RegisterMessage(SplitReadStreamResponse) - - -DESCRIPTOR._options = None -_STREAM._options = None -_READSESSION._options = None -_CREATEREADSESSIONREQUEST.fields_by_name["table_reference"]._options = None -_CREATEREADSESSIONREQUEST.fields_by_name["parent"]._options = None -_READROWSREQUEST.fields_by_name["read_position"]._options = None -_BATCHCREATEREADSESSIONSTREAMSREQUEST.fields_by_name["session"]._options = None -_BATCHCREATEREADSESSIONSTREAMSREQUEST.fields_by_name[ - "requested_streams" -]._options = None - -_BIGQUERYSTORAGE = _descriptor.ServiceDescriptor( - name="BigQueryStorage", - full_name="google.cloud.bigquery.storage.v1beta1.BigQueryStorage", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\036bigquerystorage.googleapis.com\322A\211\001https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform" - ), - serialized_start=3373, - serialized_end=4760, - methods=[ - _descriptor.MethodDescriptor( - name="CreateReadSession", - full_name="google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSession", - index=0, - containing_service=None, - input_type=_CREATEREADSESSIONREQUEST, - output_type=_READSESSION, - serialized_options=_b( - '\202\323\344\223\002w"0/v1beta1/{table_reference.project_id=projects/*}:\001*Z@";/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}:\001*\332A(table_reference,parent,requested_streams' - ), - ), - _descriptor.MethodDescriptor( - name="ReadRows", - full_name="google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRows", - index=1, - containing_service=None, - input_type=_READROWSREQUEST, - output_type=_READROWSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002;\0229/v1beta1/{read_position.stream.name=projects/*/streams/*}\332A\rread_position" - ), - ), - _descriptor.MethodDescriptor( - name="BatchCreateReadSessionStreams", - full_name="google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreams", - index=2, - containing_service=None, - input_type=_BATCHCREATEREADSESSIONSTREAMSREQUEST, - output_type=_BATCHCREATEREADSESSIONSTREAMSRESPONSE, - serialized_options=_b( - '\202\323\344\223\0022"-/v1beta1/{session.name=projects/*/sessions/*}:\001*\332A\031session,requested_streams' - ), - ), - _descriptor.MethodDescriptor( - name="FinalizeStream", - full_name="google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStream", - index=3, - containing_service=None, - input_type=_FINALIZESTREAMREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - 
'\202\323\344\223\0020"+/v1beta1/{stream.name=projects/*/streams/*}:\001*\332A\006stream' - ), - ), - _descriptor.MethodDescriptor( - name="SplitReadStream", - full_name="google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStream", - index=4, - containing_service=None, - input_type=_SPLITREADSTREAMREQUEST, - output_type=_SPLITREADSTREAMRESPONSE, - serialized_options=_b( - "\202\323\344\223\0026\0224/v1beta1/{original_stream.name=projects/*/streams/*}\332A\017original_stream" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_BIGQUERYSTORAGE) - -DESCRIPTOR.services_by_name["BigQueryStorage"] = _BIGQUERYSTORAGE - -# @@protoc_insertion_point(module_scope) diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2_grpc.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2_grpc.py deleted file mode 100644 index 85b890f0e0cc..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2_grpc.py +++ /dev/null @@ -1,168 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.bigquery_storage_v1beta1.proto import ( - storage_pb2 as google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class BigQueryStorageStub(object): - """BigQuery storage API. - - The BigQuery storage API can be used to read data stored in BigQuery. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.CreateReadSession = channel.unary_unary( - "/google.cloud.bigquery.storage.v1beta1.BigQueryStorage/CreateReadSession", - request_serializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.CreateReadSessionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.ReadSession.FromString, - ) - self.ReadRows = channel.unary_stream( - "/google.cloud.bigquery.storage.v1beta1.BigQueryStorage/ReadRows", - request_serializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.ReadRowsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.ReadRowsResponse.FromString, - ) - self.BatchCreateReadSessionStreams = channel.unary_unary( - "/google.cloud.bigquery.storage.v1beta1.BigQueryStorage/BatchCreateReadSessionStreams", - request_serializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.BatchCreateReadSessionStreamsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.BatchCreateReadSessionStreamsResponse.FromString, - ) - self.FinalizeStream = channel.unary_unary( - "/google.cloud.bigquery.storage.v1beta1.BigQueryStorage/FinalizeStream", - request_serializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.FinalizeStreamRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.SplitReadStream = channel.unary_unary( - "/google.cloud.bigquery.storage.v1beta1.BigQueryStorage/SplitReadStream", - request_serializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.SplitReadStreamRequest.SerializeToString, - 
response_deserializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.SplitReadStreamResponse.FromString, - ) - - -class BigQueryStorageServicer(object): - """BigQuery storage API. - - The BigQuery storage API can be used to read data stored in BigQuery. - """ - - def CreateReadSession(self, request, context): - """Creates a new read session. A read session divides the contents of a - BigQuery table into one or more streams, which can then be used to read - data from the table. The read session also specifies properties of the - data to be read, such as a list of columns or a push-down filter describing - the rows to be returned. - - A particular row can be read by at most one stream. When the caller has - reached the end of each stream in the session, then all the data in the - table has been read. - - Read sessions automatically expire 24 hours after they are created and do - not require manual clean-up by the caller. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ReadRows(self, request, context): - """Reads rows from the table in the format prescribed by the read session. - Each response contains one or more table rows, up to a maximum of 10 MiB - per response; read requests which attempt to read individual rows larger - than this will fail. - - Each request also returns a set of stream statistics reflecting the - estimated total number of rows in the read stream. This number is computed - based on the total table size and the number of active streams in the read - session, and may change as other streams continue to read data. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def BatchCreateReadSessionStreams(self, request, context): - """Creates additional streams for a ReadSession. This API can be used to - dynamically adjust the parallelism of a batch processing task upwards by - adding additional workers. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def FinalizeStream(self, request, context): - """Triggers the graceful termination of a single stream in a ReadSession. This - API can be used to dynamically adjust the parallelism of a batch processing - task downwards without losing data. - - This API does not delete the stream -- it remains visible in the - ReadSession, and any data processed by the stream is not released to other - streams. However, no additional data will be assigned to the stream once - this call completes. Callers must continue reading data on the stream until - the end of the stream is reached so that data which has already been - assigned to the stream will be processed. - - This method will return an error if there are no other live streams - in the Session, or if SplitReadStream() has been called on the given - Stream. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def SplitReadStream(self, request, context): - """Splits a given read stream into two Streams. These streams are referred to - as the primary and the residual of the split. The original stream can still - be read from in the same manner as before. 
Both of the returned streams can - also be read from, and the total rows return by both child streams will be - the same as the rows read from the original stream. - - Moreover, the two child streams will be allocated back to back in the - original Stream. Concretely, it is guaranteed that for streams Original, - Primary, and Residual, that Original[0-j] = Primary[0-j] and - Original[j-n] = Residual[0-m] once the streams have been read to - completion. - - This method is guaranteed to be idempotent. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_BigQueryStorageServicer_to_server(servicer, server): - rpc_method_handlers = { - "CreateReadSession": grpc.unary_unary_rpc_method_handler( - servicer.CreateReadSession, - request_deserializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.CreateReadSessionRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.ReadSession.SerializeToString, - ), - "ReadRows": grpc.unary_stream_rpc_method_handler( - servicer.ReadRows, - request_deserializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.ReadRowsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.ReadRowsResponse.SerializeToString, - ), - "BatchCreateReadSessionStreams": grpc.unary_unary_rpc_method_handler( - servicer.BatchCreateReadSessionStreams, - request_deserializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.BatchCreateReadSessionStreamsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.BatchCreateReadSessionStreamsResponse.SerializeToString, - ), - "FinalizeStream": grpc.unary_unary_rpc_method_handler( - servicer.FinalizeStream, - request_deserializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.FinalizeStreamRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "SplitReadStream": grpc.unary_unary_rpc_method_handler( - servicer.SplitReadStream, - request_deserializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.SplitReadStreamRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_storage__pb2.SplitReadStreamResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.bigquery.storage.v1beta1.BigQueryStorage", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference.proto b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference.proto deleted file mode 100644 index a55dc48eb023..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference.proto +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1beta1; - -import "google/api/resource.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; -option java_outer_classname = "TableReferenceProto"; -option java_package = "com.google.cloud.bigquery.storage.v1beta1"; - -// Table reference that includes just the 3 strings needed to identify a table. -message TableReference { - // The assigned project ID of the project. - string project_id = 1; - - // The ID of the dataset in the above project. - string dataset_id = 2; - - // The ID of the table in the above dataset. - string table_id = 3; -} - -// All fields in this message optional. -message TableModifiers { - // The snapshot time of the table. If not set, interpreted as now. - google.protobuf.Timestamp snapshot_time = 1; -} diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2.py deleted file mode 100644 index 992067f07367..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2.py +++ /dev/null @@ -1,203 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/bigquery/storage_v1beta1/proto/table_reference.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery/storage_v1beta1/proto/table_reference.proto", - package="google.cloud.bigquery.storage.v1beta1", - syntax="proto3", - serialized_options=_b( - "\n)com.google.cloud.bigquery.storage.v1beta1B\023TableReferenceProtoZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage" - ), - serialized_pb=_b( - '\nAgoogle/cloud/bigquery/storage_v1beta1/proto/table_reference.proto\x12%google.cloud.bigquery.storage.v1beta1\x1a\x19google/api/resource.proto\x1a\x1fgoogle/protobuf/timestamp.proto"J\n\x0eTableReference\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x12\x10\n\x08table_id\x18\x03 \x01(\t"C\n\x0eTableModifiers\x12\x31\n\rsnapshot_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x8e\x01\n)com.google.cloud.bigquery.storage.v1beta1B\x13TableReferenceProtoZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storageb\x06proto3' - ), - dependencies=[ - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - -_TABLEREFERENCE = 
_descriptor.Descriptor( - name="TableReference", - full_name="google.cloud.bigquery.storage.v1beta1.TableReference", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.bigquery.storage.v1beta1.TableReference.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="dataset_id", - full_name="google.cloud.bigquery.storage.v1beta1.TableReference.dataset_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="table_id", - full_name="google.cloud.bigquery.storage.v1beta1.TableReference.table_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=168, - serialized_end=242, -) - - -_TABLEMODIFIERS = _descriptor.Descriptor( - name="TableModifiers", - full_name="google.cloud.bigquery.storage.v1beta1.TableModifiers", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="snapshot_time", - full_name="google.cloud.bigquery.storage.v1beta1.TableModifiers.snapshot_time", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=244, - serialized_end=311, -) - -_TABLEMODIFIERS.fields_by_name[ - "snapshot_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -DESCRIPTOR.message_types_by_name["TableReference"] = _TABLEREFERENCE -DESCRIPTOR.message_types_by_name["TableModifiers"] = _TABLEMODIFIERS -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -TableReference = _reflection.GeneratedProtocolMessageType( - "TableReference", - (_message.Message,), - dict( - DESCRIPTOR=_TABLEREFERENCE, - __module__="google.cloud.bigquery.storage_v1beta1.proto.table_reference_pb2", - __doc__="""Table reference that includes just the 3 strings needed to identify a - table. - - - Attributes: - project_id: - The assigned project ID of the project. - dataset_id: - The ID of the dataset in the above project. - table_id: - The ID of the table in the above dataset. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.TableReference) - ), -) -_sym_db.RegisterMessage(TableReference) - -TableModifiers = _reflection.GeneratedProtocolMessageType( - "TableModifiers", - (_message.Message,), - dict( - DESCRIPTOR=_TABLEMODIFIERS, - __module__="google.cloud.bigquery.storage_v1beta1.proto.table_reference_pb2", - __doc__="""All fields in this message optional. - - - Attributes: - snapshot_time: - The snapshot time of the table. If not set, interpreted as - now. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.TableModifiers) - ), -) -_sym_db.RegisterMessage(TableModifiers) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2_grpc.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/reader.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/reader.py deleted file mode 100644 index 7e07392d0aea..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/reader.py +++ /dev/null @@ -1,644 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -import collections -import json - -try: - import fastavro -except ImportError: # pragma: NO COVER - fastavro = None -import google.api_core.exceptions - -try: - import pandas -except ImportError: # pragma: NO COVER - pandas = None -try: - import pyarrow -except ImportError: # pragma: NO COVER - pyarrow = None -import six - -try: - import pyarrow -except ImportError: # pragma: NO COVER - pyarrow = None - -from google.cloud.bigquery_storage_v1beta1 import types - - -_STREAM_RESUMPTION_EXCEPTIONS = (google.api_core.exceptions.ServiceUnavailable,) - -# The Google API endpoint can unexpectedly close long-running HTTP/2 streams. -# Unfortunately, this condition is surfaced to the caller as an internal error -# by gRPC. We don't want to resume on all internal errors, so instead we look -# for error message that we know are caused by problems that are safe to -# reconnect. -_STREAM_RESUMPTION_INTERNAL_ERROR_MESSAGES = ( - # See: https://github.com/googleapis/google-cloud-python/pull/9994 - "RST_STREAM", -) - -_FASTAVRO_REQUIRED = ( - "fastavro is required to parse ReadRowResponse messages with Avro bytes." -) -_PANDAS_REQUIRED = "pandas is required to create a DataFrame" -_PYARROW_REQUIRED = ( - "pyarrow is required to parse ReadRowResponse messages with Arrow bytes." -) - - -class ReadRowsStream(object): - """A stream of results from a read rows request. 
- - This stream is an iterable of - :class:`~google.cloud.bigquery_storage_v1beta1.types.ReadRowsResponse`. - Iterate over it to fetch all row messages. - - If the fastavro library is installed, use the - :func:`~google.cloud.bigquery_storage_v1beta1.reader.ReadRowsStream.rows()` - method to parse all messages into a stream of row dictionaries. - - If the pandas and fastavro libraries are installed, use the - :func:`~google.cloud.bigquery_storage_v1beta1.reader.ReadRowsStream.to_dataframe()` - method to parse all messages into a :class:`pandas.DataFrame`. - """ - - def __init__(self, wrapped, client, read_position, read_rows_kwargs): - """Construct a ReadRowsStream. - - Args: - wrapped (Iterable[ \ - ~google.cloud.bigquery_storage_v1beta1.types.ReadRowsResponse \ - ]): - The ReadRows stream to read. - client ( \ - ~google.cloud.bigquery_storage_v1beta1.gapic. \ - big_query_storage_client.BigQueryStorageClient \ - ): - A GAPIC client used to reconnect to a ReadRows stream. This - must be the GAPIC client to avoid a circular dependency on - this class. - read_position (Union[ \ - dict, \ - ~google.cloud.bigquery_storage_v1beta1.types.StreamPosition \ - ]): - Required. Identifier of the position in the stream to start - reading from. The offset requested must be less than the last - row read from ReadRows. Requesting a larger offset is - undefined. If a dict is provided, it must be of the same form - as the protobuf message - :class:`~google.cloud.bigquery_storage_v1beta1.types.StreamPosition` - read_rows_kwargs (dict): - Keyword arguments to use when reconnecting to a ReadRows - stream. - - Returns: - Iterable[ \ - ~google.cloud.bigquery_storage_v1beta1.types.ReadRowsResponse \ - ]: - A sequence of row messages. - """ - - # Make a copy of the read position so that we can update it without - # mutating the original input. - self._position = _copy_stream_position(read_position) - self._client = client - self._wrapped = wrapped - self._read_rows_kwargs = read_rows_kwargs - - def __iter__(self): - """An iterable of messages. - - Returns: - Iterable[ \ - ~google.cloud.bigquery_storage_v1beta1.types.ReadRowsResponse \ - ]: - A sequence of row messages. - """ - - # Infinite loop to reconnect on reconnectable errors while processing - # the row stream. - while True: - try: - for message in self._wrapped: - rowcount = message.row_count - self._position.offset += rowcount - yield message - - return # Made it through the whole stream. - except google.api_core.exceptions.InternalServerError as exc: - resumable_error = any( - resumable_message in exc.message - for resumable_message in _STREAM_RESUMPTION_INTERNAL_ERROR_MESSAGES - ) - if not resumable_error: - raise - except _STREAM_RESUMPTION_EXCEPTIONS: - # Transient error, so reconnect to the stream. - pass - - self._reconnect() - - def _reconnect(self): - """Reconnect to the ReadRows stream using the most recent offset.""" - self._wrapped = self._client.read_rows( - _copy_stream_position(self._position), **self._read_rows_kwargs - ) - - def rows(self, read_session): - """Iterate over all rows in the stream. - - This method requires the fastavro library in order to parse row - messages. - - .. warning:: - DATETIME columns are not supported. They are currently parsed as - strings in the fastavro library. - - Args: - read_session ( \ - ~google.cloud.bigquery_storage_v1beta1.types.ReadSession \ - ): - The read session associated with this read rows stream. This - contains the schema, which is required to parse the data - messages. 
- - Returns: - Iterable[Mapping]: - A sequence of rows, represented as dictionaries. - """ - return ReadRowsIterable(self, read_session) - - def to_arrow(self, read_session): - """Create a :class:`pyarrow.Table` of all rows in the stream. - - This method requires the pyarrow library and a stream using the Arrow - format. - - Args: - read_session ( \ - ~google.cloud.bigquery_storage_v1beta1.types.ReadSession \ - ): - The read session associated with this read rows stream. This - contains the schema, which is required to parse the data - messages. - - Returns: - pyarrow.Table: - A table of all rows in the stream. - """ - return self.rows(read_session).to_arrow() - - def to_dataframe(self, read_session, dtypes=None): - """Create a :class:`pandas.DataFrame` of all rows in the stream. - - This method requires the pandas libary to create a data frame and the - fastavro library to parse row messages. - - .. warning:: - DATETIME columns are not supported. They are currently parsed as - strings. - - Args: - read_session ( \ - ~google.cloud.bigquery_storage_v1beta1.types.ReadSession \ - ): - The read session associated with this read rows stream. This - contains the schema, which is required to parse the data - messages. - dtypes ( \ - Map[str, Union[str, pandas.Series.dtype]] \ - ): - Optional. A dictionary of column names pandas ``dtype``s. The - provided ``dtype`` is used when constructing the series for - the column specified. Otherwise, the default pandas behavior - is used. - - Returns: - pandas.DataFrame: - A data frame of all rows in the stream. - """ - if pandas is None: - raise ImportError(_PANDAS_REQUIRED) - - return self.rows(read_session).to_dataframe(dtypes=dtypes) - - -class ReadRowsIterable(object): - """An iterable of rows from a read session. - - Args: - reader (google.cloud.bigquery_storage_v1beta1.reader.ReadRowsStream): - A read rows stream. - read_session (google.cloud.bigquery_storage_v1beta1.types.ReadSession): - A read session. This is required because it contains the schema - used in the stream messages. - """ - - # This class is modelled after the google.cloud.bigquery.table.RowIterator - # and aims to be API compatible where possible. - - def __init__(self, reader, read_session): - self._status = None - self._reader = reader - self._read_session = read_session - self._stream_parser = _StreamParser.from_read_session(self._read_session) - - @property - def total_rows(self): - """int: Number of estimated rows in the current stream. - - May change over time. - """ - return getattr(self._status, "estimated_row_count", None) - - @property - def pages(self): - """A generator of all pages in the stream. - - Returns: - types.GeneratorType[google.cloud.bigquery_storage_v1beta1.ReadRowsPage]: - A generator of pages. - """ - # Each page is an iterator of rows. But also has num_items, remaining, - # and to_dataframe. - for message in self._reader: - self._status = message.status - yield ReadRowsPage(self._stream_parser, message) - - def __iter__(self): - """Iterator for each row in all pages.""" - for page in self.pages: - for row in page: - yield row - - def to_arrow(self): - """Create a :class:`pyarrow.Table` of all rows in the stream. - - This method requires the pyarrow library and a stream using the Arrow - format. - - Returns: - pyarrow.Table: - A table of all rows in the stream. 
- """ - record_batches = [] - for page in self.pages: - record_batches.append(page.to_arrow()) - return pyarrow.Table.from_batches(record_batches) - - def to_dataframe(self, dtypes=None): - """Create a :class:`pandas.DataFrame` of all rows in the stream. - - This method requires the pandas libary to create a data frame and the - fastavro library to parse row messages. - - .. warning:: - DATETIME columns are not supported. They are currently parsed as - strings in the fastavro library. - - Args: - dtypes ( \ - Map[str, Union[str, pandas.Series.dtype]] \ - ): - Optional. A dictionary of column names pandas ``dtype``s. The - provided ``dtype`` is used when constructing the series for - the column specified. Otherwise, the default pandas behavior - is used. - - Returns: - pandas.DataFrame: - A data frame of all rows in the stream. - """ - if pandas is None: - raise ImportError(_PANDAS_REQUIRED) - - if dtypes is None: - dtypes = {} - - # If it's an Arrow stream, calling to_arrow, then converting to a - # pandas dataframe is about 2x faster. This is because pandas.concat is - # rarely no-copy, whereas pyarrow.Table.from_batches + to_pandas is - # usually no-copy. - schema_type = self._read_session.WhichOneof("schema") - if schema_type == "arrow_schema": - record_batch = self.to_arrow() - df = record_batch.to_pandas() - for column in dtypes: - df[column] = pandas.Series(df[column], dtype=dtypes[column]) - return df - - frames = [] - for page in self.pages: - frames.append(page.to_dataframe(dtypes=dtypes)) - return pandas.concat(frames) - - -class ReadRowsPage(object): - """An iterator of rows from a read session message. - - Args: - stream_parser (google.cloud.bigquery_storage_v1beta1.reader._StreamParser): - A helper for parsing messages into rows. - message (google.cloud.bigquery_storage_v1beta1.types.ReadRowsResponse): - A message of data from a read rows stream. - """ - - # This class is modeled after google.api_core.page_iterator.Page and aims - # to provide API compatibility where possible. - - def __init__(self, stream_parser, message): - self._stream_parser = stream_parser - self._message = message - self._iter_rows = None - self._num_items = self._message.row_count - self._remaining = self._message.row_count - - def _parse_rows(self): - """Parse rows from the message only once.""" - if self._iter_rows is not None: - return - - rows = self._stream_parser.to_rows(self._message) - self._iter_rows = iter(rows) - - @property - def num_items(self): - """int: Total items in the page.""" - return self._num_items - - @property - def remaining(self): - """int: Remaining items in the page.""" - return self._remaining - - def __iter__(self): - """A ``ReadRowsPage`` is an iterator.""" - return self - - def next(self): - """Get the next row in the page.""" - self._parse_rows() - if self._remaining > 0: - self._remaining -= 1 - return six.next(self._iter_rows) - - # Alias needed for Python 2/3 support. - __next__ = next - - def to_arrow(self): - """Create an :class:`pyarrow.RecordBatch` of rows in the page. - - Returns: - pyarrow.RecordBatch: - Rows from the message, as an Arrow record batch. - """ - return self._stream_parser.to_arrow(self._message) - - def to_dataframe(self, dtypes=None): - """Create a :class:`pandas.DataFrame` of rows in the page. - - This method requires the pandas libary to create a data frame and the - fastavro library to parse row messages. - - .. warning:: - DATETIME columns are not supported. They are currently parsed as - strings in the fastavro library. 
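
The dtypes argument documented above maps column names to pandas dtypes; columns that are not listed keep the default pandas inference. A one-line sketch, assuming a reader and session obtained as in the earlier sketches and a numeric column named "latitude" (the column name is illustrative):

    import numpy

    frame = reader.to_dataframe(session, dtypes={"latitude": numpy.float16})
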
- - Args: - dtypes ( \ - Map[str, Union[str, pandas.Series.dtype]] \ - ): - Optional. A dictionary of column names pandas ``dtype``s. The - provided ``dtype`` is used when constructing the series for - the column specified. Otherwise, the default pandas behavior - is used. - - Returns: - pandas.DataFrame: - A data frame of all rows in the stream. - """ - if pandas is None: - raise ImportError(_PANDAS_REQUIRED) - - return self._stream_parser.to_dataframe(self._message, dtypes=dtypes) - - -class _StreamParser(object): - def to_arrow(self, message): - raise NotImplementedError("Not implemented.") - - def to_dataframe(self, message, dtypes=None): - raise NotImplementedError("Not implemented.") - - def to_rows(self, message): - raise NotImplementedError("Not implemented.") - - @staticmethod - def from_read_session(read_session): - schema_type = read_session.WhichOneof("schema") - if schema_type == "avro_schema": - return _AvroStreamParser(read_session) - elif schema_type == "arrow_schema": - return _ArrowStreamParser(read_session) - else: - raise TypeError( - "Unsupported schema type in read_session: {0}".format(schema_type) - ) - - -class _AvroStreamParser(_StreamParser): - """Helper to parse Avro messages into useful representations.""" - - def __init__(self, read_session): - """Construct an _AvroStreamParser. - - Args: - read_session (google.cloud.bigquery_storage_v1beta1.types.ReadSession): - A read session. This is required because it contains the schema - used in the stream messages. - """ - if fastavro is None: - raise ImportError(_FASTAVRO_REQUIRED) - - self._read_session = read_session - self._avro_schema_json = None - self._fastavro_schema = None - self._column_names = None - - def to_arrow(self, message): - """Create an :class:`pyarrow.RecordBatch` of rows in the page. - - Args: - message (google.cloud.bigquery_storage_v1beta1.types.ReadRowsResponse): - Protocol buffer from the read rows stream, to convert into an - Arrow record batch. - - Returns: - pyarrow.RecordBatch: - Rows from the message, as an Arrow record batch. - """ - raise NotImplementedError("to_arrow not implemented for Avro streams.") - - def to_dataframe(self, message, dtypes=None): - """Create a :class:`pandas.DataFrame` of rows in the page. - - This method requires the pandas libary to create a data frame and the - fastavro library to parse row messages. - - .. warning:: - DATETIME columns are not supported. They are currently parsed as - strings in the fastavro library. - - Args: - dtypes ( \ - Map[str, Union[str, pandas.Series.dtype]] \ - ): - Optional. A dictionary of column names pandas ``dtype``s. The - provided ``dtype`` is used when constructing the series for - the column specified. Otherwise, the default pandas behavior - is used. - - Returns: - pandas.DataFrame: - A data frame of all rows in the stream. 
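
from_read_session() above dispatches on which schema field the session carries. A hedged sketch of the same check, assuming a session returned by create_read_session():

    schema_type = session.WhichOneof("schema")  # either "avro_schema" or "arrow_schema"
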
- """ - self._parse_avro_schema() - - if dtypes is None: - dtypes = {} - - columns = collections.defaultdict(list) - for row in self.to_rows(message): - for column in row: - columns[column].append(row[column]) - for column in dtypes: - columns[column] = pandas.Series(columns[column], dtype=dtypes[column]) - return pandas.DataFrame(columns, columns=self._column_names) - - def _parse_avro_schema(self): - """Extract and parse Avro schema from a read session.""" - if self._avro_schema_json: - return - - self._avro_schema_json = json.loads(self._read_session.avro_schema.schema) - self._column_names = tuple( - (field["name"] for field in self._avro_schema_json["fields"]) - ) - - def _parse_fastavro(self): - """Convert parsed Avro schema to fastavro format.""" - self._parse_avro_schema() - self._fastavro_schema = fastavro.parse_schema(self._avro_schema_json) - - def to_rows(self, message): - """Parse all rows in a stream message. - - Args: - message ( \ - ~google.cloud.bigquery_storage_v1beta1.types.ReadRowsResponse \ - ): - A message containing Avro bytes to parse into rows. - - Returns: - Iterable[Mapping]: - A sequence of rows, represented as dictionaries. - """ - self._parse_fastavro() - messageio = six.BytesIO(message.avro_rows.serialized_binary_rows) - while True: - # Loop in a while loop because schemaless_reader can only read - # a single record. - try: - # TODO: Parse DATETIME into datetime.datetime (no timezone), - # instead of as a string. - yield fastavro.schemaless_reader(messageio, self._fastavro_schema) - except StopIteration: - break # Finished with message - - -class _ArrowStreamParser(_StreamParser): - def __init__(self, read_session): - if pyarrow is None: - raise ImportError(_PYARROW_REQUIRED) - - self._read_session = read_session - self._schema = None - - def to_arrow(self, message): - return self._parse_arrow_message(message) - - def to_rows(self, message): - record_batch = self._parse_arrow_message(message) - - # Iterate through each column simultaneously, and make a dict from the - # row values - for row in zip(*record_batch.columns): - yield dict(zip(self._column_names, row)) - - def to_dataframe(self, message, dtypes=None): - record_batch = self._parse_arrow_message(message) - - if dtypes is None: - dtypes = {} - - df = record_batch.to_pandas() - - for column in dtypes: - df[column] = pandas.Series(df[column], dtype=dtypes[column]) - - return df - - def _parse_arrow_message(self, message): - self._parse_arrow_schema() - - return pyarrow.read_record_batch( - pyarrow.py_buffer(message.arrow_record_batch.serialized_record_batch), - self._schema, - ) - - def _parse_arrow_schema(self): - if self._schema: - return - - self._schema = pyarrow.read_schema( - pyarrow.py_buffer(self._read_session.arrow_schema.serialized_schema) - ) - self._column_names = [field.name for field in self._schema] - - -def _copy_stream_position(position): - """Copy a StreamPosition. - - Args: - position (Union[ \ - dict, \ - ~google.cloud.bigquery_storage_v1beta1.types.StreamPosition \ - ]): - StreamPostion (or dictionary in StreamPosition format) to copy. - - Returns: - ~google.cloud.bigquery_storage_v1beta1.types.StreamPosition: - A copy of the input StreamPostion. 
- """ - if isinstance(position, types.StreamPosition): - output = types.StreamPosition() - output.CopyFrom(position) - return output - - return types.StreamPosition(**position) diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/types.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/types.py deleted file mode 100644 index fd5a3e35c830..000000000000 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/types.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import -import sys - -from google.api_core.protobuf_helpers import get_messages - -from google.cloud.bigquery_storage_v1beta1.proto import arrow_pb2 -from google.cloud.bigquery_storage_v1beta1.proto import avro_pb2 -from google.cloud.bigquery_storage_v1beta1.proto import read_options_pb2 -from google.cloud.bigquery_storage_v1beta1.proto import storage_pb2 -from google.cloud.bigquery_storage_v1beta1.proto import table_reference_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import timestamp_pb2 - - -_shared_modules = [empty_pb2, timestamp_pb2] - -_local_modules = [ - arrow_pb2, - avro_pb2, - read_options_pb2, - storage_pb2, - table_reference_pb2, -] - -names = [] - -for module in _shared_modules: # pragma: NO COVER - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.bigquery_storage_v1beta1.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - - -__all__ = tuple(sorted(names)) diff --git a/bigquery_storage/noxfile.py b/bigquery_storage/noxfile.py deleted file mode 100644 index 79b9d3689512..000000000000 --- a/bigquery_storage/noxfile.py +++ /dev/null @@ -1,182 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! - -from __future__ import absolute_import -import os -import shutil - -import nox - - -LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) -BLACK_VERSION = "black==19.3b0" -BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -if os.path.exists("samples"): - BLACK_PATHS.append("samples") - - -@nox.session(python="3.7") -def lint(session): - """Run linters. 
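
The deleted types module above re-exports the generated protobuf messages under a single namespace, and, as the reader's _copy_stream_position shows, a plain dict of the same shape is accepted wherever a StreamPosition message is expected. A minimal sketch:

    from google.cloud.bigquery_storage_v1beta1 import types

    as_message = types.StreamPosition(offset=10)
    as_dict = {"offset": 10}
    assert types.StreamPosition(**as_dict) == as_message
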
- - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION, *LOCAL_DEPS) - session.run("black", "--check", *BLACK_PATHS) - session.run("flake8", "google", "tests") - - -@nox.session(python="3.6") -def blacken(session): - """Run black. - - Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. - """ - session.install(BLACK_VERSION) - session.run("black", *BLACK_PATHS) - - -@nox.session(python="3.7") -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def default(session): - # Install all test dependencies, then install this package in-place. - session.install("mock", "pytest", "pytest-cov") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", ".") - session.install("-e", ".[fastavro,pandas,pyarrow]") - - # Run py.test against the unit tests. - session.run( - "py.test", - "--quiet", - "--cov=google.cloud", - "--cov=tests.unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - ) - - -@nox.session(python=["2.7", "3.5", "3.6", "3.7"]) -def unit(session): - """Run the unit test suite.""" - default(session) - - -@nox.session(python=["2.7", "3.7"]) -def system(session): - """Run the system test suite.""" - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install("mock", "pytest") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", "../test_utils/") - session.install("-e", ".[fastavro,pandas,pyarrow]") - session.install("-e", "../bigquery/") - session.install("-e", ".") - - # Run py.test against the system tests. - if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) - if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) - - -@nox.session(python=["2.7", "3.7"]) -def samples(session): - requirements_path = os.path.join("samples", "requirements.txt") - requirements_exists = os.path.exists(requirements_path) - - # Sanity check: Only run tests if the environment variable is set. 
- if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") - - session.install("mock", "pytest") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - if requirements_exists: - session.install("-r", requirements_path) - session.install("-e", ".") - - session.run("py.test", "--quiet", "samples", *session.posargs) - - -@nox.session(python="3.7") -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=79") - - session.run("coverage", "erase") - - -@nox.session(python="3.7") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/bigquery_storage/samples/__init__.py b/bigquery_storage/samples/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bigquery_storage/samples/quickstart.py b/bigquery_storage/samples/quickstart.py deleted file mode 100644 index 8a3b8617b387..000000000000 --- a/bigquery_storage/samples/quickstart.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse - - -def main(project_id="your-project-id", snapshot_millis=0): - # [START bigquerystorage_quickstart] - from google.cloud import bigquery_storage_v1beta1 - - # TODO(developer): Set the project_id variable. - # project_id = 'your-project-id' - # - # The read session is created in this project. This project can be - # different from that which contains the table. - - client = bigquery_storage_v1beta1.BigQueryStorageClient() - - # This example reads baby name data from the public datasets. - table_ref = bigquery_storage_v1beta1.types.TableReference() - table_ref.project_id = "bigquery-public-data" - table_ref.dataset_id = "usa_names" - table_ref.table_id = "usa_1910_current" - - # We limit the output columns to a subset of those allowed in the table, - # and set a simple filter to only report names from the state of - # Washington (WA). - read_options = bigquery_storage_v1beta1.types.TableReadOptions() - read_options.selected_fields.append("name") - read_options.selected_fields.append("number") - read_options.selected_fields.append("state") - read_options.row_restriction = 'state = "WA"' - - # Set a snapshot time if it's been specified. 
- modifiers = None - if snapshot_millis > 0: - modifiers = bigquery_storage_v1beta1.types.TableModifiers() - modifiers.snapshot_time.FromMilliseconds(snapshot_millis) - - parent = "projects/{}".format(project_id) - session = client.create_read_session( - table_ref, - parent, - table_modifiers=modifiers, - read_options=read_options, - # This API can also deliver data serialized in Apache Arrow format. - # This example leverages Apache Avro. - format_=bigquery_storage_v1beta1.enums.DataFormat.AVRO, - # We use a LIQUID strategy in this example because we only read from a - # single stream. Consider BALANCED if you're consuming multiple streams - # concurrently and want more consistent stream sizes. - sharding_strategy=(bigquery_storage_v1beta1.enums.ShardingStrategy.LIQUID), - ) # API request. - - # We'll use only a single stream for reading data from the table. Because - # of dynamic sharding, this will yield all the rows in the table. However, - # if you wanted to fan out multiple readers you could do so by having a - # reader process each individual stream. - reader = client.read_rows( - bigquery_storage_v1beta1.types.StreamPosition(stream=session.streams[0]) - ) - - # The read stream contains blocks of Avro-encoded bytes. The rows() method - # uses the fastavro library to parse these blocks as an interable of Python - # dictionaries. Install fastavro with the following command: - # - # pip install google-cloud-bigquery-storage[fastavro] - rows = reader.rows(session) - - # Do any local processing by iterating over the rows. The - # google-cloud-bigquery-storage client reconnects to the API after any - # transient network errors or timeouts. - names = set() - states = set() - - for row in rows: - names.add(row["name"]) - states.add(row["state"]) - - print("Got {} unique names in states: {}".format(len(names), states)) - # [END bigquerystorage_quickstart] - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument("project_id") - parser.add_argument("--snapshot_millis", default=0, type=int) - args = parser.parse_args() - main(project_id=args.project_id) diff --git a/bigquery_storage/samples/requirements.txt b/bigquery_storage/samples/requirements.txt deleted file mode 100644 index acd0800e713e..000000000000 --- a/bigquery_storage/samples/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -fastavro \ No newline at end of file diff --git a/bigquery_storage/samples/tests/__init__.py b/bigquery_storage/samples/tests/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bigquery_storage/samples/tests/quickstart_test.py b/bigquery_storage/samples/tests/quickstart_test.py deleted file mode 100644 index fde039f4620b..000000000000 --- a/bigquery_storage/samples/tests/quickstart_test.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import os - -import pytest - -from .. 
import quickstart - - -def now_millis(): - return int( - (datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds() - * 1000 - ) - - -@pytest.fixture() -def project_id(): - return os.environ["PROJECT_ID"] - - -def test_quickstart_wo_snapshot(capsys, project_id): - quickstart.main(project_id) - out, _ = capsys.readouterr() - assert "WA" in out - - -def test_quickstart_with_snapshot(capsys, project_id): - quickstart.main(project_id, now_millis() - 5000) - out, _ = capsys.readouterr() - assert "WA" in out diff --git a/bigquery_storage/setup.cfg b/bigquery_storage/setup.cfg deleted file mode 100644 index 3bd555500e37..000000000000 --- a/bigquery_storage/setup.cfg +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[bdist_wheel] -universal = 1 diff --git a/bigquery_storage/setup.py b/bigquery_storage/setup.py deleted file mode 100644 index 29b968a59501..000000000000 --- a/bigquery_storage/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import io -import os - -import setuptools - -name = "google-cloud-bigquery-storage" -description = "BigQuery Storage API API client library" -version = "0.7.0" -release_status = "Development Status :: 4 - Beta" -dependencies = [ - "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", - 'enum34; python_version < "3.4"', -] -extras = { - "pandas": "pandas>=0.17.1", - "fastavro": "fastavro>=0.21.2", - "pyarrow": "pyarrow>=0.13.0, != 0.14.0", -} - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package for package in setuptools.find_packages() if package.startswith("google") -] - -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url="https://github.com/GoogleCloudPlatform/google-cloud-python", - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - namespace_packages=namespaces, - install_requires=dependencies, - extras_require=extras, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", - include_package_data=True, - zip_safe=False, -) diff --git a/bigquery_storage/synth.metadata b/bigquery_storage/synth.metadata deleted file 
mode 100644 index 804d6c3b3362..000000000000 --- a/bigquery_storage/synth.metadata +++ /dev/null @@ -1,39 +0,0 @@ -{ - "updateTime": "2019-11-12T13:17:12.571598Z", - "sources": [ - { - "generator": { - "name": "artman", - "version": "0.41.1", - "dockerImage": "googleapis/artman@sha256:545c758c76c3f779037aa259023ec3d1ef2d57d2c8cd00a222cb187d63ceac5e" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f69562be0608904932bdcfbc5ad8b9a22d9dceb8", - "internalRef": "279774957" - } - }, - { - "template": { - "name": "python_library", - "origin": "synthtool.gcp", - "version": "2019.10.17" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "bigquery_storage", - "apiVersion": "v1beta1", - "language": "python", - "generator": "gapic", - "config": "google/cloud/bigquery/storage/artman_bigquerystorage_v1beta1.yaml" - } - } - ] -} \ No newline at end of file diff --git a/bigquery_storage/synth.py b/bigquery_storage/synth.py deleted file mode 100644 index 0866ae8eeb98..000000000000 --- a/bigquery_storage/synth.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This script is used to synthesize generated parts of this library.""" - -import synthtool as s -from synthtool import gcp - -gapic = gcp.GAPICGenerator() -common = gcp.CommonTemplates() -version = "v1beta1" - -library = gapic.py_library( - "bigquery_storage", - version, - config_path="/google/cloud/bigquery/storage/" "artman_bigquerystorage_v1beta1.yaml", - artman_output_name="bigquerystorage-v1beta1", - include_protos=True, -) - -s.move( - library, - excludes=[ - "docs/conf.py", - "docs/index.rst", - "google/cloud/bigquery_storage_v1beta1/__init__.py", - "README.rst", - "nox*.py", - "setup.py", - "setup.cfg", - ], -) - -s.replace( - [ - "google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py", - "google/cloud/bigquery_storage_v1beta1/proto/storage_pb2_grpc.py", - ], - "from google.cloud.bigquery.storage_v1beta1.proto", - "from google.cloud.bigquery_storage_v1beta1.proto", -) - -s.replace( - "google/cloud/bigquery_storage_v1beta1/gapic/" "big_query_storage_client.py", - "google-cloud-bigquerystorage", - "google-cloud-bigquery-storage", -) - -s.replace( - "google/cloud/bigquery_storage_v1beta1/gapic/" "big_query_storage_client.py", - "import google.api_core.gapic_v1.method\n", - "\g<0>import google.api_core.path_template\n", -) - -s.replace( - ["tests/unit/gapic/v1beta1/test_big_query_storage_client_v1beta1.py"], - "from google.cloud import bigquery_storage_v1beta1", - "from google.cloud.bigquery_storage_v1beta1.gapic import big_query_storage_client # noqa", -) - -s.replace( - ["tests/unit/gapic/v1beta1/test_big_query_storage_client_v1beta1.py"], - "bigquery_storage_v1beta1.BigQueryStorageClient", - "big_query_storage_client.BigQueryStorageClient", -) - -# START: Ignore lint and coverage -s.replace( - ["google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py"], - 
"if transport:", - "if transport: # pragma: no cover", -) - -s.replace( - ["google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py"], - r"to_grpc_metadata\(\n", - "to_grpc_metadata( # pragma: no cover\n", -) - -s.replace( - ["google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py"], - r"metadata.append\(routing_metadata\)", - "metadata.append(routing_metadata) # pragma: no cover", -) - -s.replace( - [ - "google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py" - ], - "if channel is not None and credentials is not None:", - "if channel is not None and credentials is not None: # pragma: no cover", -) - -s.replace( - [ - "google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py" - ], - "if channel is None:", - "if channel is None: # pragma: no cover", -) - -s.replace( - [ - "google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py" - ], - r"google.api_core.grpc_helpers.create_channel\(", - "google.api_core.grpc_helpers.create_channel( # pragma: no cover", -) - -# Fix up proto docs that are missing summary line. -s.replace( - "google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py", - '"""Attributes:', - '"""Protocol buffer.\n\n Attributes:', -) -# END: Ignore lint and coverage - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -optional_deps = [".[fastavro,pandas,pyarrow]"] -system_test_deps = optional_deps + ["../bigquery/"] -templated_files = common.py_library( - unit_cov_level=79, - cov_level=79, - samples_test=True, - system_test_dependencies=system_test_deps, - unit_test_dependencies=optional_deps, -) -s.move(templated_files) - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/bigquery_storage/tests/system/assets/people_data.csv b/bigquery_storage/tests/system/assets/people_data.csv deleted file mode 100644 index 819adfc4bdf5..000000000000 --- a/bigquery_storage/tests/system/assets/people_data.csv +++ /dev/null @@ -1,6 +0,0 @@ -first_name,last_name,age -John,Doe,42 -Jack,Black,53 -Nick,Sleek,24 -Kevin,Powell,50 -Johnny,Young,2 diff --git a/bigquery_storage/tests/system/conftest.py b/bigquery_storage/tests/system/conftest.py deleted file mode 100644 index 5ca85c1b42c8..000000000000 --- a/bigquery_storage/tests/system/conftest.py +++ /dev/null @@ -1,233 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""System tests for reading rows from tables.""" - -import os -import uuid - -import pytest - -from google.cloud import bigquery_storage_v1beta1 - - -_ASSETS_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "assets") - - -@pytest.fixture(scope="session") -def project_id(): - return os.environ["PROJECT_ID"] - - -@pytest.fixture(scope="session") -def credentials(): - from google.oauth2 import service_account - - # NOTE: the test config in noxfile checks that the env variable is indeed set - filename = os.environ["GOOGLE_APPLICATION_CREDENTIALS"] - return service_account.Credentials.from_service_account_file(filename) - - -@pytest.fixture(scope="session") -def bq_client(credentials): - from google.cloud import bigquery - - return bigquery.Client(credentials=credentials) - - -@pytest.fixture(scope="session") -def dataset(project_id, bq_client): - from google.cloud import bigquery - - unique_suffix = str(uuid.uuid4()).replace("-", "_") - dataset_name = "bq_storage_system_tests_" + unique_suffix - - dataset_id = "{}.{}".format(project_id, dataset_name) - dataset = bigquery.Dataset(dataset_id) - dataset.location = "US" - created_dataset = bq_client.create_dataset(dataset) - - yield created_dataset - - bq_client.delete_dataset(dataset, delete_contents=True) - - -@pytest.fixture(scope="session") -def table(project_id, dataset, bq_client): - from google.cloud import bigquery - - schema = [ - bigquery.SchemaField("first_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("last_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("age", "INTEGER", mode="NULLABLE"), - ] - - table_id = "{}.{}.{}".format(project_id, dataset.dataset_id, "users") - bq_table = bigquery.Table(table_id, schema=schema) - created_table = bq_client.create_table(bq_table) - - yield created_table - - bq_client.delete_table(created_table) - - -@pytest.fixture -def table_with_data_ref(dataset, table, bq_client): - from google.cloud import bigquery - - job_config = bigquery.LoadJobConfig() - job_config.source_format = bigquery.SourceFormat.CSV - job_config.skip_leading_rows = 1 - job_config.schema = table.schema - - filename = os.path.join(_ASSETS_DIR, "people_data.csv") - - with open(filename, "rb") as source_file: - job = bq_client.load_table_from_file(source_file, table, job_config=job_config) - - job.result() # wait for the load to complete - - table_ref = bigquery_storage_v1beta1.types.TableReference() - table_ref.project_id = table.project - table_ref.dataset_id = table.dataset_id - table_ref.table_id = table.table_id - yield table_ref - - # truncate table data - query = "DELETE FROM {}.{} WHERE 1 = 1".format(dataset.dataset_id, table.table_id) - query_job = bq_client.query(query, location="US") - query_job.result() - - -@pytest.fixture -def col_partition_table_ref(project_id, dataset, bq_client): - from google.cloud import bigquery - - schema = [ - bigquery.SchemaField("occurred", "DATE", mode="NULLABLE"), - bigquery.SchemaField("description", "STRING", mode="NULLABLE"), - ] - time_partitioning = bigquery.table.TimePartitioning( - type_=bigquery.table.TimePartitioningType.DAY, field="occurred" - ) - bq_table = bigquery.table.Table( - table_ref="{}.{}.notable_events".format(project_id, dataset.dataset_id), - schema=schema, - ) - bq_table.time_partitioning = time_partitioning - - created_table = bq_client.create_table(bq_table) - - table_ref = bigquery_storage_v1beta1.types.TableReference() - table_ref.project_id = created_table.project - table_ref.dataset_id = created_table.dataset_id - 
table_ref.table_id = created_table.table_id - yield table_ref - - bq_client.delete_table(created_table) - - -@pytest.fixture -def ingest_partition_table_ref(project_id, dataset, bq_client): - from google.cloud import bigquery - - schema = [ - bigquery.SchemaField("shape", "STRING", mode="NULLABLE"), - bigquery.SchemaField("altitude", "INT64", mode="NULLABLE"), - ] - time_partitioning = bigquery.table.TimePartitioning( - type_=bigquery.table.TimePartitioningType.DAY, - field=None, # use _PARTITIONTIME pseudo column - ) - bq_table = bigquery.table.Table( - table_ref="{}.{}.ufo_sightings".format(project_id, dataset.dataset_id), - schema=schema, - ) - bq_table.time_partitioning = time_partitioning - - created_table = bq_client.create_table(bq_table) - - table_ref = bigquery_storage_v1beta1.types.TableReference() - table_ref.project_id = created_table.project - table_ref.dataset_id = created_table.dataset_id - table_ref.table_id = created_table.table_id - yield table_ref - - bq_client.delete_table(created_table) - - -@pytest.fixture -def all_types_table_ref(project_id, dataset, bq_client): - from google.cloud import bigquery - - schema = [ - bigquery.SchemaField("string_field", "STRING"), - bigquery.SchemaField("bytes_field", "BYTES"), - bigquery.SchemaField("int64_field", "INT64"), - bigquery.SchemaField("float64_field", "FLOAT64"), - bigquery.SchemaField("numeric_field", "NUMERIC"), - bigquery.SchemaField("bool_field", "BOOL"), - bigquery.SchemaField("geography_field", "GEOGRAPHY"), - bigquery.SchemaField( - "person_struct_field", - "STRUCT", - fields=( - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("age", "INT64"), - ), - ), - bigquery.SchemaField("timestamp_field", "TIMESTAMP"), - bigquery.SchemaField("date_field", "DATE"), - bigquery.SchemaField("time_field", "TIME"), - bigquery.SchemaField("datetime_field", "DATETIME"), - bigquery.SchemaField("string_array_field", "STRING", mode="REPEATED"), - ] - bq_table = bigquery.table.Table( - table_ref="{}.{}.complex_records".format(project_id, dataset.dataset_id), - schema=schema, - ) - - created_table = bq_client.create_table(bq_table) - - table_ref = bigquery_storage_v1beta1.types.TableReference() - table_ref.project_id = created_table.project - table_ref.dataset_id = created_table.dataset_id - table_ref.table_id = created_table.table_id - yield table_ref - - bq_client.delete_table(created_table) - - -@pytest.fixture(scope="session") -def client(credentials): - return bigquery_storage_v1beta1.BigQueryStorageClient(credentials=credentials) - - -@pytest.fixture() -def table_reference(): - table_ref = bigquery_storage_v1beta1.types.TableReference() - table_ref.project_id = "bigquery-public-data" - table_ref.dataset_id = "usa_names" - table_ref.table_id = "usa_1910_2013" - return table_ref - - -@pytest.fixture() -def small_table_reference(): - table_ref = bigquery_storage_v1beta1.types.TableReference() - table_ref.project_id = "bigquery-public-data" - table_ref.dataset_id = "utility_us" - table_ref.table_id = "country_code_iso" - return table_ref diff --git a/bigquery_storage/tests/system/test_reader.py b/bigquery_storage/tests/system/test_reader.py deleted file mode 100644 index 2ba1f99fb0a0..000000000000 --- a/bigquery_storage/tests/system/test_reader.py +++ /dev/null @@ -1,456 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""System tests for reading rows from tables.""" - -import copy -import datetime as dt -import decimal -import re - -import pytest -import pytz - -from google.cloud import bigquery -from google.cloud import bigquery_storage_v1beta1 -from google.protobuf import timestamp_pb2 - - -def _to_bq_table_ref(proto_table_ref, partition_suffix=""): - """Converts protobuf table reference to bigquery table reference. - - Args: - proto_table_ref (bigquery_storage_v1beta1.types.TableReference): - A protobuf reference to a table. - partition_suffix (str): - An optional suffix to append to the table_id, useful for selecting - partitions of ingestion-time partitioned tables. - - Returns: - google.cloud.bigquery.table.TableReference - """ - return bigquery.table.TableReference.from_api_repr( - { - "projectId": proto_table_ref.project_id, - "datasetId": proto_table_ref.dataset_id, - "tableId": proto_table_ref.table_id + partition_suffix, - } - ) - - -@pytest.mark.parametrize( - "data_format,expected_schema_type", - ( - (None, "avro_schema"), # Default format (Avro). - (bigquery_storage_v1beta1.enums.DataFormat.AVRO, "avro_schema"), - (bigquery_storage_v1beta1.enums.DataFormat.ARROW, "arrow_schema"), - ), -) -def test_read_rows_as_blocks_full_table( - client, project_id, small_table_reference, data_format, expected_schema_type -): - session = client.create_read_session( - small_table_reference, - "projects/{}".format(project_id), - format_=data_format, - requested_streams=1, - ) - stream_pos = bigquery_storage_v1beta1.types.StreamPosition( - stream=session.streams[0] - ) - schema_type = session.WhichOneof("schema") - assert schema_type == expected_schema_type - - blocks = list(client.read_rows(stream_pos)) - - assert len(blocks) > 0 - block = blocks[0] - assert block.status.estimated_row_count > 0 - - -@pytest.mark.parametrize( - "data_format,expected_schema_type", - ( - (bigquery_storage_v1beta1.enums.DataFormat.AVRO, "avro_schema"), - (bigquery_storage_v1beta1.enums.DataFormat.ARROW, "arrow_schema"), - ), -) -def test_read_rows_as_rows_full_table( - client, project_id, small_table_reference, data_format, expected_schema_type -): - session = client.create_read_session( - small_table_reference, - "projects/{}".format(project_id), - format_=data_format, - requested_streams=1, - ) - stream_pos = bigquery_storage_v1beta1.types.StreamPosition( - stream=session.streams[0] - ) - - rows = list(client.read_rows(stream_pos).rows(session)) - - assert len(rows) > 0 - - -@pytest.mark.parametrize( - "data_format", - ( - (bigquery_storage_v1beta1.enums.DataFormat.AVRO), - (bigquery_storage_v1beta1.enums.DataFormat.ARROW), - ), -) -def test_basic_nonfiltered_read(client, project_id, table_with_data_ref, data_format): - session = client.create_read_session( - table_with_data_ref, - "projects/{}".format(project_id), - format_=data_format, - requested_streams=1, - ) - stream_pos = bigquery_storage_v1beta1.types.StreamPosition( - stream=session.streams[0] - ) - - rows = list(client.read_rows(stream_pos).rows(session)) - - assert len(rows) == 5 # all table rows - - -def test_filtered_rows_read(client, project_id, 
table_with_data_ref): - read_options = bigquery_storage_v1beta1.types.TableReadOptions() - read_options.row_restriction = "age >= 50" - - session = client.create_read_session( - table_with_data_ref, - "projects/{}".format(project_id), - format_=bigquery_storage_v1beta1.enums.DataFormat.AVRO, - requested_streams=1, - read_options=read_options, - ) - stream_pos = bigquery_storage_v1beta1.types.StreamPosition( - stream=session.streams[0] - ) - - rows = list(client.read_rows(stream_pos).rows(session)) - - assert len(rows) == 2 - - -@pytest.mark.parametrize( - "data_format", - ( - (bigquery_storage_v1beta1.enums.DataFormat.AVRO), - (bigquery_storage_v1beta1.enums.DataFormat.ARROW), - ), -) -def test_column_selection_read(client, project_id, table_with_data_ref, data_format): - read_options = bigquery_storage_v1beta1.types.TableReadOptions() - read_options.selected_fields.append("first_name") - read_options.selected_fields.append("age") - - session = client.create_read_session( - table_with_data_ref, - "projects/{}".format(project_id), - format_=data_format, - requested_streams=1, - read_options=read_options, - ) - stream_pos = bigquery_storage_v1beta1.types.StreamPosition( - stream=session.streams[0] - ) - - rows = list(client.read_rows(stream_pos).rows(session)) - - for row in rows: - assert sorted(row.keys()) == ["age", "first_name"] - - -def test_snapshot(client, project_id, table_with_data_ref, bq_client): - before_new_data = timestamp_pb2.Timestamp() - before_new_data.GetCurrentTime() - - # load additional data into the table - new_data = [ - {u"first_name": u"NewGuyFoo", u"last_name": u"Smith", u"age": 46}, - {u"first_name": u"NewGuyBar", u"last_name": u"Jones", u"age": 30}, - ] - - destination = _to_bq_table_ref(table_with_data_ref) - bq_client.load_table_from_json(new_data, destination).result() - - # read data using the timestamp before the additional data load - session = client.create_read_session( - table_with_data_ref, - "projects/{}".format(project_id), - format_=bigquery_storage_v1beta1.enums.DataFormat.AVRO, - requested_streams=1, - table_modifiers={"snapshot_time": before_new_data}, - ) - stream_pos = bigquery_storage_v1beta1.types.StreamPosition( - stream=session.streams[0] - ) - - rows = list(client.read_rows(stream_pos).rows(session)) - - # verify that only the data before the timestamp was returned - assert len(rows) == 5 # all initial records - - for row in rows: - assert "NewGuy" not in row["first_name"] # no new records - - -def test_column_partitioned_table( - client, project_id, col_partition_table_ref, bq_client -): - data = [ - {"description": "Tracking established.", "occurred": "2017-02-15"}, - {"description": "Look, a solar eclipse!", "occurred": "2018-02-15"}, - {"description": "Fake solar eclipse reported.", "occurred": "2018-02-15"}, - {"description": "1 day after false eclipse report.", "occurred": "2018-02-16"}, - {"description": "1 year after false eclipse report.", "occurred": "2019-02-15"}, - ] - - destination = _to_bq_table_ref(col_partition_table_ref) - bq_client.load_table_from_json(data, destination).result() - - # Read from the table with a partition filter specified, and verify that - # only the expected data is returned. 
- read_options = bigquery_storage_v1beta1.types.TableReadOptions() - read_options.row_restriction = "occurred = '2018-02-15'" - - session = client.create_read_session( - col_partition_table_ref, - "projects/{}".format(project_id), - format_=bigquery_storage_v1beta1.enums.DataFormat.AVRO, - requested_streams=1, - read_options=read_options, - ) - - assert session.streams # there should be some data to fetch - - stream_pos = bigquery_storage_v1beta1.types.StreamPosition( - stream=session.streams[0] - ) - rows = list(client.read_rows(stream_pos).rows(session)) - - assert len(rows) == 2 - - expected_descriptions = ("Look, a solar eclipse!", "Fake solar eclipse reported.") - for row in rows: - assert row["occurred"] == dt.date(2018, 2, 15) - assert row["description"] in expected_descriptions - - -@pytest.mark.parametrize( - "data_format", - ( - (bigquery_storage_v1beta1.enums.DataFormat.AVRO), - (bigquery_storage_v1beta1.enums.DataFormat.ARROW), - ), -) -def test_ingestion_time_partitioned_table( - client, project_id, ingest_partition_table_ref, bq_client, data_format -): - data = [{"shape": "cigar", "altitude": 1200}, {"shape": "disc", "altitude": 750}] - destination = _to_bq_table_ref( - ingest_partition_table_ref, partition_suffix="$20190809" - ) - bq_client.load_table_from_json(data, destination).result() - - data = [ - {"shape": "sphere", "altitude": 3500}, - {"shape": "doughnut", "altitude": 100}, - ] - destination = _to_bq_table_ref( - ingest_partition_table_ref, partition_suffix="$20190810" - ) - bq_client.load_table_from_json(data, destination).result() - - data = [ - {"shape": "elephant", "altitude": 1}, - {"shape": "rocket", "altitude": 12700}, - ] - destination = _to_bq_table_ref( - ingest_partition_table_ref, partition_suffix="$20190811" - ) - bq_client.load_table_from_json(data, destination).result() - - read_options = bigquery_storage_v1beta1.types.TableReadOptions() - read_options.row_restriction = "DATE(_PARTITIONTIME) = '2019-08-10'" - - session = client.create_read_session( - ingest_partition_table_ref, - "projects/{}".format(project_id), - format_=data_format, - requested_streams=1, - read_options=read_options, - ) - - assert session.streams # there should be some data to fetch - - stream_pos = bigquery_storage_v1beta1.types.StreamPosition( - stream=session.streams[0] - ) - rows = list(client.read_rows(stream_pos).rows(session)) - assert len(rows) == 2 - - actual_items = {(row["shape"], row["altitude"]) for row in rows} - expected_items = {("sphere", 3500), ("doughnut", 100)} - assert actual_items == expected_items - - -@pytest.mark.parametrize( - "data_format", - ( - (bigquery_storage_v1beta1.enums.DataFormat.AVRO), - (bigquery_storage_v1beta1.enums.DataFormat.ARROW), - ), -) -def test_decoding_data_types( - client, project_id, all_types_table_ref, bq_client, data_format -): - data = [ - { - u"string_field": u"Price: € 9.95.", - u"bytes_field": bigquery._helpers._bytes_to_json(b"byteees"), - u"int64_field": -1085, - u"float64_field": -42.195, - u"numeric_field": "1.4142", - u"bool_field": True, - u"geography_field": '{"type": "Point", "coordinates": [-49.3028, 69.0622]}', - u"person_struct_field": {u"name": u"John", u"age": 42}, - u"timestamp_field": 1565357902.017896, # 2019-08-09T13:38:22.017896 - u"date_field": u"1995-03-17", - u"time_field": u"16:24:51", - u"datetime_field": u"2005-10-26T19:49:41", - u"string_array_field": [u"foo", u"bar", u"baz"], - } - ] - - # Explicit schema is needed to recognize bytes_field as BYTES, and not STRING. 
- # Since partial schemas are not supported in load_table_from_json(), a full - # schema needs to be specified. - schema = [ - bigquery.SchemaField("string_field", "STRING"), - bigquery.SchemaField("bytes_field", "BYTES"), - bigquery.SchemaField("int64_field", "INT64"), - bigquery.SchemaField("float64_field", "FLOAT64"), - bigquery.SchemaField("numeric_field", "NUMERIC"), - bigquery.SchemaField("bool_field", "BOOL"), - bigquery.SchemaField("geography_field", "GEOGRAPHY"), - bigquery.SchemaField( - "person_struct_field", - "STRUCT", - fields=( - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("age", "INT64"), - ), - ), - bigquery.SchemaField("timestamp_field", "TIMESTAMP"), - bigquery.SchemaField("date_field", "DATE"), - bigquery.SchemaField("time_field", "TIME"), - bigquery.SchemaField("datetime_field", "DATETIME"), - bigquery.SchemaField("string_array_field", "STRING", mode="REPEATED"), - ] - - job_config = bigquery.LoadJobConfig(schema=schema) - destination = _to_bq_table_ref(all_types_table_ref) - bq_client.load_table_from_json(data, destination, job_config=job_config).result() - - session = client.create_read_session( - all_types_table_ref, - "projects/{}".format(project_id), - format_=data_format, - requested_streams=1, - ) - - assert session.streams # there should be data available - - stream_pos = bigquery_storage_v1beta1.types.StreamPosition( - stream=session.streams[0] - ) - - rows = list(client.read_rows(stream_pos).rows(session)) - - expected_result = { - u"string_field": u"Price: € 9.95.", - u"bytes_field": b"byteees", - u"int64_field": -1085, - u"float64_field": -42.195, - u"numeric_field": decimal.Decimal("1.4142"), - u"bool_field": True, - u"geography_field": "POINT(-49.3028 69.0622)", - u"person_struct_field": {u"name": u"John", u"age": 42}, - u"timestamp_field": dt.datetime(2019, 8, 9, 13, 38, 22, 17896, tzinfo=pytz.UTC), - u"date_field": dt.date(1995, 3, 17), - u"time_field": dt.time(16, 24, 51), - u"string_array_field": [u"foo", u"bar", u"baz"], - } - - result_copy = copy.copy(rows[0]) - del result_copy["datetime_field"] - assert result_copy == expected_result - - # Compare datetime separately, AVRO and PYARROW return different object types, - # although they should both represent the same value. - # TODO: when fixed, change assertion to assert a datetime instance! 
- expected_pattern = re.compile(r"2005-10-26( |T)19:49:41") - assert expected_pattern.match(str(rows[0]["datetime_field"])) - - -@pytest.mark.parametrize( - "data_format", - ( - (bigquery_storage_v1beta1.enums.DataFormat.AVRO), - (bigquery_storage_v1beta1.enums.DataFormat.ARROW), - ), -) -def test_resuming_read_from_offset(client, project_id, data_format): - shakespeare_ref = bigquery_storage_v1beta1.types.TableReference() - shakespeare_ref.project_id = project_id - shakespeare_ref.dataset_id = "public_samples_copy" - shakespeare_ref.table_id = "shakespeare" - - read_session = client.create_read_session( - shakespeare_ref, - "projects/{}".format(project_id), - format_=data_format, - requested_streams=1, - ) - - assert read_session.streams # there should be data available - - stream_pos = bigquery_storage_v1beta1.types.StreamPosition( - stream=read_session.streams[0], offset=0 - ) - read_rows_stream = client.read_rows(stream_pos) - - # fetch the first two batches of rows - rows_iter = iter(read_rows_stream) - some_rows = next(rows_iter) - more_rows = next(rows_iter) - - # fetch the rest of the rows using the stream offset - new_stream_pos = bigquery_storage_v1beta1.types.StreamPosition( - stream=read_session.streams[0], offset=some_rows.row_count + more_rows.row_count - ) - remaining_rows_count = sum( - 1 for _ in client.read_rows(new_stream_pos).rows(read_session) - ) - - # verify that the counts match - expected_len = 164656 # total rows in shakespeare table - actual_len = remaining_rows_count + some_rows.row_count + more_rows.row_count - assert actual_len == expected_len diff --git a/bigquery_storage/tests/system/test_reader_dataframe.py b/bigquery_storage/tests/system/test_reader_dataframe.py deleted file mode 100644 index 07dcab384c05..000000000000 --- a/bigquery_storage/tests/system/test_reader_dataframe.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""System tests for reading rows with pandas connector.""" - -import numpy -import pyarrow.types -import pytest - -from google.cloud import bigquery_storage_v1beta1 - - -def test_read_rows_to_arrow(client, project_id): - table_ref = bigquery_storage_v1beta1.types.TableReference() - table_ref.project_id = "bigquery-public-data" - table_ref.dataset_id = "new_york_citibike" - table_ref.table_id = "citibike_stations" - - read_options = bigquery_storage_v1beta1.types.TableReadOptions() - read_options.selected_fields.append("station_id") - read_options.selected_fields.append("latitude") - read_options.selected_fields.append("longitude") - read_options.selected_fields.append("name") - session = client.create_read_session( - table_ref, - "projects/{}".format(project_id), - format_=bigquery_storage_v1beta1.enums.DataFormat.ARROW, - read_options=read_options, - requested_streams=1, - ) - stream_pos = bigquery_storage_v1beta1.types.StreamPosition( - stream=session.streams[0] - ) - - tbl = client.read_rows(stream_pos).to_arrow(session) - - assert tbl.num_columns == 4 - schema = tbl.schema - # Use field_by_name because the order doesn't currently match that of - # selected_fields. - assert pyarrow.types.is_int64(schema.field_by_name("station_id").type) - assert pyarrow.types.is_float64(schema.field_by_name("latitude").type) - assert pyarrow.types.is_float64(schema.field_by_name("longitude").type) - assert pyarrow.types.is_string(schema.field_by_name("name").type) - - -@pytest.mark.parametrize( - "data_format,expected_schema_type", - ( - (bigquery_storage_v1beta1.enums.DataFormat.AVRO, "avro_schema"), - (bigquery_storage_v1beta1.enums.DataFormat.ARROW, "arrow_schema"), - ), -) -def test_read_rows_to_dataframe(client, project_id, data_format, expected_schema_type): - table_ref = bigquery_storage_v1beta1.types.TableReference() - table_ref.project_id = "bigquery-public-data" - table_ref.dataset_id = "new_york_citibike" - table_ref.table_id = "citibike_stations" - session = client.create_read_session( - table_ref, - "projects/{}".format(project_id), - format_=data_format, - requested_streams=1, - ) - schema_type = session.WhichOneof("schema") - assert schema_type == expected_schema_type - - stream_pos = bigquery_storage_v1beta1.types.StreamPosition( - stream=session.streams[0] - ) - - frame = client.read_rows(stream_pos).to_dataframe( - session, dtypes={"latitude": numpy.float16} - ) - - # Station ID is a required field (no nulls), so the datatype should always - # be integer. - assert frame.station_id.dtype.name == "int64" - assert frame.latitude.dtype.name == "float16" - assert frame.longitude.dtype.name == "float64" - assert frame["name"].str.startswith("Central Park").any() diff --git a/bigquery_storage/tests/unit/gapic/v1beta1/test_big_query_storage_client_v1beta1.py b/bigquery_storage/tests/unit/gapic/v1beta1/test_big_query_storage_client_v1beta1.py deleted file mode 100644 index e7870b1d25d6..000000000000 --- a/bigquery_storage/tests/unit/gapic/v1beta1/test_big_query_storage_client_v1beta1.py +++ /dev/null @@ -1,261 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests.""" - -import mock -import pytest - -from google.cloud.bigquery_storage_v1beta1.gapic import big_query_storage_client # noqa -from google.cloud.bigquery_storage_v1beta1.proto import storage_pb2 -from google.cloud.bigquery_storage_v1beta1.proto import table_reference_pb2 -from google.protobuf import empty_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - def unary_stream(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestBigQueryStorageClient(object): - def test_create_read_session(self): - # Setup Expected Response - name = "name3373707" - expected_response = {"name": name} - expected_response = storage_pb2.ReadSession(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = big_query_storage_client.BigQueryStorageClient() - - # Setup Request - table_reference = {} - parent = "parent-995424086" - - response = client.create_read_session(table_reference, parent) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = storage_pb2.CreateReadSessionRequest( - table_reference=table_reference, parent=parent - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_read_session_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = big_query_storage_client.BigQueryStorageClient() - - # Setup request - table_reference = {} - parent = "parent-995424086" - - with pytest.raises(CustomException): - client.create_read_session(table_reference, parent) - - def test_read_rows(self): - # Setup Expected Response - row_count = 1340416618 - expected_response = {"row_count": row_count} - expected_response = storage_pb2.ReadRowsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[iter([expected_response])]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with 
patch as create_channel: - create_channel.return_value = channel - client = big_query_storage_client.BigQueryStorageClient() - - # Setup Request - read_position = {} - - response = client.read_rows(read_position) - resources = list(response) - assert len(resources) == 1 - assert expected_response == resources[0] - - assert len(channel.requests) == 1 - expected_request = storage_pb2.ReadRowsRequest(read_position=read_position) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_read_rows_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = big_query_storage_client.BigQueryStorageClient() - - # Setup request - read_position = {} - - with pytest.raises(CustomException): - client.read_rows(read_position) - - def test_batch_create_read_session_streams(self): - # Setup Expected Response - expected_response = {} - expected_response = storage_pb2.BatchCreateReadSessionStreamsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = big_query_storage_client.BigQueryStorageClient() - - # Setup Request - session = {} - requested_streams = 1017221410 - - response = client.batch_create_read_session_streams(session, requested_streams) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = storage_pb2.BatchCreateReadSessionStreamsRequest( - session=session, requested_streams=requested_streams - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_batch_create_read_session_streams_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = big_query_storage_client.BigQueryStorageClient() - - # Setup request - session = {} - requested_streams = 1017221410 - - with pytest.raises(CustomException): - client.batch_create_read_session_streams(session, requested_streams) - - def test_finalize_stream(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = big_query_storage_client.BigQueryStorageClient() - - # Setup Request - stream = {} - - client.finalize_stream(stream) - - assert len(channel.requests) == 1 - expected_request = storage_pb2.FinalizeStreamRequest(stream=stream) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_finalize_stream_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = big_query_storage_client.BigQueryStorageClient() - - # Setup request - stream = {} - - with pytest.raises(CustomException): - client.finalize_stream(stream) - - def test_split_read_stream(self): - # Setup Expected Response - expected_response = {} - expected_response = storage_pb2.SplitReadStreamResponse(**expected_response) - - # Mock 
the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = big_query_storage_client.BigQueryStorageClient() - - # Setup Request - original_stream = {} - - response = client.split_read_stream(original_stream) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = storage_pb2.SplitReadStreamRequest( - original_stream=original_stream - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_split_read_stream_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = big_query_storage_client.BigQueryStorageClient() - - # Setup request - original_stream = {} - - with pytest.raises(CustomException): - client.split_read_stream(original_stream) diff --git a/bigquery_storage/tests/unit/test_client.py b/bigquery_storage/tests/unit/test_client.py deleted file mode 100644 index fbce027d136e..000000000000 --- a/bigquery_storage/tests/unit/test_client.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from google.api_core.gapic_v1 import client_info -import mock -import pytest - -from google.cloud.bigquery_storage_v1beta1 import types - - -PROJECT = "my-project" -SERVICE_ACCOUNT_PROJECT = "project-from-credentials" - - -@pytest.fixture() -def mock_transport(monkeypatch): - from google.cloud.bigquery_storage_v1beta1.gapic.transports import ( - big_query_storage_grpc_transport, - ) - - transport = mock.create_autospec( - big_query_storage_grpc_transport.BigQueryStorageGrpcTransport - ) - return transport - - -@pytest.fixture() -def client_under_test(mock_transport): - from google.cloud.bigquery_storage_v1beta1 import client - - # The mock is detected as a callable. By creating a real callable here, the - # mock can still be used to verify RPCs. 
- def transport_callable(credentials=None, default_class=None, address=None): - return mock_transport - - return client.BigQueryStorageClient(transport=transport_callable) - - -def test_constructor_w_client_info(mock_transport): - from google.cloud.bigquery_storage_v1beta1 import client - - def transport_callable(credentials=None, default_class=None, address=None): - return mock_transport - - client_under_test = client.BigQueryStorageClient( - transport=transport_callable, - client_info=client_info.ClientInfo( - client_library_version="test-client-version" - ), - ) - - user_agent = client_under_test._client_info.to_user_agent() - assert "test-client-version" in user_agent - - -def test_create_read_session(mock_transport, client_under_test): - table_reference = types.TableReference( - project_id="data-project-id", dataset_id="dataset_id", table_id="table_id" - ) - - client_under_test.create_read_session(table_reference, "projects/other-project") - - expected_request = types.CreateReadSessionRequest( - table_reference=table_reference, parent="projects/other-project" - ) - mock_transport.create_read_session.assert_called_once_with( - expected_request, metadata=mock.ANY, timeout=mock.ANY - ) - - -def test_read_rows(mock_transport, client_under_test): - stream_position = types.StreamPosition() - - client_under_test.read_rows(stream_position) - - expected_request = types.ReadRowsRequest(read_position=stream_position) - mock_transport.create_read_session.read_rows( - expected_request, metadata=mock.ANY, timeout=mock.ANY - ) diff --git a/bigquery_storage/tests/unit/test_reader.py b/bigquery_storage/tests/unit/test_reader.py deleted file mode 100644 index 3d5127522eea..000000000000 --- a/bigquery_storage/tests/unit/test_reader.py +++ /dev/null @@ -1,863 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import decimal -import itertools -import json - -import fastavro -import pyarrow -import mock -import pandas -import pandas.testing -import pytest -import pytz -import six - -import google.api_core.exceptions -from google.cloud import bigquery_storage_v1beta1 - - -PROJECT = "my-project" -BQ_TO_AVRO_TYPES = { - "int64": "long", - "float64": "double", - "bool": "boolean", - "numeric": {"type": "bytes", "logicalType": "decimal", "precision": 38, "scale": 9}, - "string": "string", - "bytes": "bytes", - "date": {"type": "int", "logicalType": "date"}, - "datetime": {"type": "string", "sqlType": "DATETIME"}, - "time": {"type": "long", "logicalType": "time-micros"}, - "timestamp": {"type": "long", "logicalType": "timestamp-micros"}, -} -# This dictionary is duplicated in bigquery/google/cloud/bigquery/_pandas_helpers.py -# When modifying it be sure to update it there as well. 
-BQ_TO_ARROW_TYPES = { - "int64": pyarrow.int64(), - "float64": pyarrow.float64(), - "bool": pyarrow.bool_(), - "numeric": pyarrow.decimal128(38, 9), - "string": pyarrow.utf8(), - "bytes": pyarrow.binary(), - "date": pyarrow.date32(), # int32 days since epoch - "datetime": pyarrow.timestamp("us"), - "time": pyarrow.time64("us"), - "timestamp": pyarrow.timestamp("us", tz="UTC"), -} -SCALAR_COLUMNS = [ - {"name": "int_col", "type": "int64"}, - {"name": "float_col", "type": "float64"}, - {"name": "num_col", "type": "numeric"}, - {"name": "bool_col", "type": "bool"}, - {"name": "str_col", "type": "string"}, - {"name": "bytes_col", "type": "bytes"}, - {"name": "date_col", "type": "date"}, - {"name": "time_col", "type": "time"}, - {"name": "ts_col", "type": "timestamp"}, -] -SCALAR_COLUMN_NAMES = [field["name"] for field in SCALAR_COLUMNS] -SCALAR_BLOCKS = [ - [ - { - "int_col": 123, - "float_col": 3.14, - "num_col": decimal.Decimal("9.99"), - "bool_col": True, - "str_col": "hello world", - "bytes_col": b"ascii bytes", - "date_col": datetime.date(1998, 9, 4), - "time_col": datetime.time(12, 0), - "ts_col": datetime.datetime(2000, 1, 1, 5, 0, tzinfo=pytz.utc), - }, - { - "int_col": 456, - "float_col": 2.72, - "num_col": decimal.Decimal("0.99"), - "bool_col": False, - "str_col": "hallo welt", - "bytes_col": b"\xbb\xee\xff", - "date_col": datetime.date(1995, 3, 2), - "time_col": datetime.time(13, 37), - "ts_col": datetime.datetime(1965, 4, 3, 2, 1, tzinfo=pytz.utc), - }, - ], - [ - { - "int_col": 789, - "float_col": 1.23, - "num_col": decimal.Decimal("5.67"), - "bool_col": True, - "str_col": u"こんにちは世界", - "bytes_col": b"\x54\x69\x6d", - "date_col": datetime.date(1970, 1, 1), - "time_col": datetime.time(16, 20), - "ts_col": datetime.datetime(1991, 8, 25, 20, 57, 8, tzinfo=pytz.utc), - } - ], -] - - -@pytest.fixture() -def mut(): - from google.cloud.bigquery_storage_v1beta1 import reader - - return reader - - -@pytest.fixture() -def class_under_test(mut): - return mut.ReadRowsStream - - -@pytest.fixture() -def mock_client(): - from google.cloud.bigquery_storage_v1beta1.gapic import big_query_storage_client - - return mock.create_autospec(big_query_storage_client.BigQueryStorageClient) - - -def _bq_to_avro_blocks(bq_blocks, avro_schema_json): - avro_schema = fastavro.parse_schema(avro_schema_json) - avro_blocks = [] - for block in bq_blocks: - blockio = six.BytesIO() - for row in block: - fastavro.schemaless_writer(blockio, avro_schema, row) - - response = bigquery_storage_v1beta1.types.ReadRowsResponse() - response.row_count = len(block) - response.avro_rows.serialized_binary_rows = blockio.getvalue() - avro_blocks.append(response) - return avro_blocks - - -def _bq_to_arrow_batch_objects(bq_blocks, arrow_schema): - arrow_batches = [] - for block in bq_blocks: - arrays = [] - for name in arrow_schema.names: - arrays.append( - pyarrow.array( - (row[name] for row in block), - type=arrow_schema.field(name).type, - size=len(block), - ) - ) - arrow_batches.append( - pyarrow.RecordBatch.from_arrays(arrays, schema=arrow_schema) - ) - return arrow_batches - - -def _bq_to_arrow_batches(bq_blocks, arrow_schema): - arrow_batches = [] - for record_batch in _bq_to_arrow_batch_objects(bq_blocks, arrow_schema): - response = bigquery_storage_v1beta1.types.ReadRowsResponse() - response.arrow_record_batch.serialized_record_batch = ( - record_batch.serialize().to_pybytes() - ) - arrow_batches.append(response) - return arrow_batches - - -def _pages_w_nonresumable_internal_error(avro_blocks): - for block in avro_blocks: - 
yield block - raise google.api_core.exceptions.InternalServerError( - "INTERNAL: Got a nonresumable error." - ) - - -def _pages_w_resumable_internal_error(avro_blocks): - for block in avro_blocks: - yield block - raise google.api_core.exceptions.InternalServerError( - "INTERNAL: Received RST_STREAM with error code 2." - ) - - -def _pages_w_unavailable(pages): - for page in pages: - yield page - raise google.api_core.exceptions.ServiceUnavailable("test: please reconnect") - - -def _avro_blocks_w_deadline(avro_blocks): - for block in avro_blocks: - yield block - raise google.api_core.exceptions.DeadlineExceeded("test: timeout, don't reconnect") - - -def _generate_avro_read_session(avro_schema_json): - schema = json.dumps(avro_schema_json) - return bigquery_storage_v1beta1.types.ReadSession(avro_schema={"schema": schema}) - - -def _generate_arrow_read_session(arrow_schema): - return bigquery_storage_v1beta1.types.ReadSession( - arrow_schema={"serialized_schema": arrow_schema.serialize().to_pybytes()} - ) - - -def _bq_to_avro_schema(bq_columns): - fields = [] - avro_schema = {"type": "record", "name": "__root__", "fields": fields} - - for column in bq_columns: - doc = column.get("description") - name = column["name"] - type_ = BQ_TO_AVRO_TYPES[column["type"]] - mode = column.get("mode", "nullable").lower() - - if mode == "nullable": - type_ = ["null", type_] - - fields.append({"name": name, "type": type_, "doc": doc}) - - return avro_schema - - -def _bq_to_arrow_schema(bq_columns): - def bq_col_as_field(column): - doc = column.get("description") - name = column["name"] - type_ = BQ_TO_ARROW_TYPES[column["type"]] - mode = column.get("mode", "nullable").lower() - - return pyarrow.field(name, type_, mode == "nullable", {"description": doc}) - - return pyarrow.schema(bq_col_as_field(c) for c in bq_columns) - - -def _get_avro_bytes(rows, avro_schema): - avro_file = six.BytesIO() - for row in rows: - fastavro.schemaless_writer(avro_file, avro_schema, row) - return avro_file.getvalue() - - -def test_avro_rows_raises_import_error(mut, class_under_test, mock_client, monkeypatch): - monkeypatch.setattr(mut, "fastavro", None) - reader = class_under_test( - [], mock_client, bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - - bq_columns = [{"name": "int_col", "type": "int64"}] - avro_schema = _bq_to_avro_schema(bq_columns) - read_session = _generate_avro_read_session(avro_schema) - - with pytest.raises(ImportError): - reader.rows(read_session) - - -def test_pyarrow_rows_raises_import_error( - mut, class_under_test, mock_client, monkeypatch -): - monkeypatch.setattr(mut, "pyarrow", None) - reader = class_under_test( - [], mock_client, bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - - bq_columns = [{"name": "int_col", "type": "int64"}] - arrow_schema = _bq_to_arrow_schema(bq_columns) - read_session = _generate_arrow_read_session(arrow_schema) - - with pytest.raises(ImportError): - reader.rows(read_session) - - -def test_rows_no_schema_set_raises_type_error( - mut, class_under_test, mock_client, monkeypatch -): - reader = class_under_test( - [], mock_client, bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - read_session = bigquery_storage_v1beta1.types.ReadSession() - - with pytest.raises(TypeError): - reader.rows(read_session) - - -def test_rows_w_empty_stream(class_under_test, mock_client): - bq_columns = [{"name": "int_col", "type": "int64"}] - avro_schema = _bq_to_avro_schema(bq_columns) - read_session = _generate_avro_read_session(avro_schema) - reader = class_under_test( - [], 
mock_client, bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - - got = reader.rows(read_session) - assert got.total_rows is None - assert tuple(got) == () - - -def test_rows_w_empty_stream_arrow(class_under_test, mock_client): - bq_columns = [{"name": "int_col", "type": "int64"}] - arrow_schema = _bq_to_arrow_schema(bq_columns) - read_session = _generate_arrow_read_session(arrow_schema) - reader = class_under_test( - [], mock_client, bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - - got = reader.rows(read_session) - assert got.total_rows is None - assert tuple(got) == () - - -def test_rows_w_scalars(class_under_test, mock_client): - avro_schema = _bq_to_avro_schema(SCALAR_COLUMNS) - read_session = _generate_avro_read_session(avro_schema) - avro_blocks = _bq_to_avro_blocks(SCALAR_BLOCKS, avro_schema) - - reader = class_under_test( - avro_blocks, mock_client, bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - got = tuple(reader.rows(read_session)) - - expected = tuple(itertools.chain.from_iterable(SCALAR_BLOCKS)) - assert got == expected - - -def test_rows_w_scalars_arrow(class_under_test, mock_client): - arrow_schema = _bq_to_arrow_schema(SCALAR_COLUMNS) - read_session = _generate_arrow_read_session(arrow_schema) - arrow_batches = _bq_to_arrow_batches(SCALAR_BLOCKS, arrow_schema) - - reader = class_under_test( - arrow_batches, mock_client, bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - got = tuple(reader.rows(read_session)) - - expected = tuple(itertools.chain.from_iterable(SCALAR_BLOCKS)) - assert got == expected - - -def test_rows_w_timeout(class_under_test, mock_client): - bq_columns = [{"name": "int_col", "type": "int64"}] - avro_schema = _bq_to_avro_schema(bq_columns) - read_session = _generate_avro_read_session(avro_schema) - bq_blocks_1 = [ - [{"int_col": 123}, {"int_col": 234}], - [{"int_col": 345}, {"int_col": 456}], - ] - avro_blocks_1 = _avro_blocks_w_deadline( - _bq_to_avro_blocks(bq_blocks_1, avro_schema) - ) - bq_blocks_2 = [[{"int_col": 567}, {"int_col": 789}], [{"int_col": 890}]] - avro_blocks_2 = _bq_to_avro_blocks(bq_blocks_2, avro_schema) - - mock_client.read_rows.return_value = avro_blocks_2 - stream_position = bigquery_storage_v1beta1.types.StreamPosition( - stream={"name": "test"} - ) - - reader = class_under_test( - avro_blocks_1, - mock_client, - stream_position, - {"metadata": {"test-key": "test-value"}}, - ) - - with pytest.raises(google.api_core.exceptions.DeadlineExceeded): - list(reader.rows(read_session)) - - # Don't reconnect on DeadlineException. This allows user-specified timeouts - # to be respected. 
- mock_client.read_rows.assert_not_called() - - -def test_rows_w_nonresumable_internal_error(class_under_test, mock_client): - bq_columns = [{"name": "int_col", "type": "int64"}] - avro_schema = _bq_to_avro_schema(bq_columns) - read_session = _generate_avro_read_session(avro_schema) - bq_blocks = [[{"int_col": 1024}, {"int_col": 512}], [{"int_col": 256}]] - avro_blocks = _pages_w_nonresumable_internal_error( - _bq_to_avro_blocks(bq_blocks, avro_schema) - ) - - stream_position = bigquery_storage_v1beta1.types.StreamPosition( - stream={"name": "test"} - ) - - reader = class_under_test(avro_blocks, mock_client, stream_position, {}) - - with pytest.raises( - google.api_core.exceptions.InternalServerError, match="nonresumable error" - ): - list(reader.rows(read_session)) - - mock_client.read_rows.assert_not_called() - - -def test_rows_w_reconnect(class_under_test, mock_client): - bq_columns = [{"name": "int_col", "type": "int64"}] - avro_schema = _bq_to_avro_schema(bq_columns) - read_session = _generate_avro_read_session(avro_schema) - bq_blocks_1 = [ - [{"int_col": 123}, {"int_col": 234}], - [{"int_col": 345}, {"int_col": 456}], - ] - avro_blocks_1 = _pages_w_unavailable(_bq_to_avro_blocks(bq_blocks_1, avro_schema)) - bq_blocks_2 = [[{"int_col": 1024}, {"int_col": 512}], [{"int_col": 256}]] - avro_blocks_2 = _bq_to_avro_blocks(bq_blocks_2, avro_schema) - avro_blocks_2 = _pages_w_resumable_internal_error( - _bq_to_avro_blocks(bq_blocks_2, avro_schema) - ) - bq_blocks_3 = [[{"int_col": 567}, {"int_col": 789}], [{"int_col": 890}]] - avro_blocks_3 = _bq_to_avro_blocks(bq_blocks_3, avro_schema) - - for block in avro_blocks_3: - block.status.estimated_row_count = 7 - - mock_client.read_rows.side_effect = (avro_blocks_2, avro_blocks_3) - stream_position = bigquery_storage_v1beta1.types.StreamPosition( - stream={"name": "test"} - ) - - reader = class_under_test( - avro_blocks_1, - mock_client, - stream_position, - {"metadata": {"test-key": "test-value"}}, - ) - got = reader.rows(read_session) - - expected = tuple( - itertools.chain( - itertools.chain.from_iterable(bq_blocks_1), - itertools.chain.from_iterable(bq_blocks_2), - itertools.chain.from_iterable(bq_blocks_3), - ) - ) - - assert tuple(got) == expected - assert got.total_rows == 7 - mock_client.read_rows.assert_any_call( - bigquery_storage_v1beta1.types.StreamPosition( - stream={"name": "test"}, offset=4 - ), - metadata={"test-key": "test-value"}, - ) - mock_client.read_rows.assert_called_with( - bigquery_storage_v1beta1.types.StreamPosition( - stream={"name": "test"}, offset=7 - ), - metadata={"test-key": "test-value"}, - ) - - -def test_rows_w_reconnect_by_page(class_under_test, mock_client): - bq_columns = [{"name": "int_col", "type": "int64"}] - avro_schema = _bq_to_avro_schema(bq_columns) - read_session = _generate_avro_read_session(avro_schema) - bq_blocks_1 = [ - [{"int_col": 123}, {"int_col": 234}], - [{"int_col": 345}, {"int_col": 456}], - ] - avro_blocks_1 = _bq_to_avro_blocks(bq_blocks_1, avro_schema) - bq_blocks_2 = [[{"int_col": 567}, {"int_col": 789}], [{"int_col": 890}]] - avro_blocks_2 = _bq_to_avro_blocks(bq_blocks_2, avro_schema) - - avro_blocks_1[0].status.estimated_row_count = 8 - avro_blocks_1[1].status.estimated_row_count = 6 - avro_blocks_2[0].status.estimated_row_count = 9 - avro_blocks_2[1].status.estimated_row_count = 7 - - mock_client.read_rows.return_value = avro_blocks_2 - stream_position = bigquery_storage_v1beta1.types.StreamPosition( - stream={"name": "test"} - ) - - reader = class_under_test( - 
_pages_w_unavailable(avro_blocks_1), - mock_client, - stream_position, - {"metadata": {"test-key": "test-value"}}, - ) - got = reader.rows(read_session) - pages = iter(got.pages) - - assert got.total_rows is None - - page_1 = next(pages) - assert got.total_rows == 8 - assert page_1.num_items == 2 - assert page_1.remaining == 2 - assert tuple(page_1) == tuple(bq_blocks_1[0]) - assert page_1.num_items == 2 - assert page_1.remaining == 0 - - page_2 = next(pages) - assert got.total_rows == 6 - assert next(page_2) == bq_blocks_1[1][0] - assert page_2.num_items == 2 - assert page_2.remaining == 1 - assert next(page_2) == bq_blocks_1[1][1] - - page_3 = next(pages) - assert tuple(page_3) == tuple(bq_blocks_2[0]) - assert page_3.num_items == 2 - assert page_3.remaining == 0 - assert got.total_rows == 9 - - page_4 = next(pages) - assert got.total_rows == 7 - assert tuple(page_4) == tuple(bq_blocks_2[1]) - assert page_4.num_items == 1 - assert page_4.remaining == 0 - - -def test_to_arrow_no_pyarrow_raises_import_error( - mut, class_under_test, mock_client, monkeypatch -): - monkeypatch.setattr(mut, "pyarrow", None) - arrow_schema = _bq_to_arrow_schema(SCALAR_COLUMNS) - read_session = _generate_arrow_read_session(arrow_schema) - arrow_batches = _bq_to_arrow_batches(SCALAR_BLOCKS, arrow_schema) - reader = class_under_test( - arrow_batches, mock_client, bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - - with pytest.raises(ImportError): - reader.to_arrow(read_session) - - with pytest.raises(ImportError): - reader.rows(read_session).to_arrow() - - with pytest.raises(ImportError): - next(reader.rows(read_session).pages).to_arrow() - - -def test_to_arrow_w_scalars_arrow(class_under_test): - arrow_schema = _bq_to_arrow_schema(SCALAR_COLUMNS) - read_session = _generate_arrow_read_session(arrow_schema) - arrow_batches = _bq_to_arrow_batches(SCALAR_BLOCKS, arrow_schema) - reader = class_under_test( - arrow_batches, mock_client, bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - actual_table = reader.to_arrow(read_session) - expected_table = pyarrow.Table.from_batches( - _bq_to_arrow_batch_objects(SCALAR_BLOCKS, arrow_schema) - ) - assert actual_table == expected_table - - -def test_to_dataframe_no_pandas_raises_import_error( - mut, class_under_test, mock_client, monkeypatch -): - monkeypatch.setattr(mut, "pandas", None) - avro_schema = _bq_to_avro_schema(SCALAR_COLUMNS) - read_session = _generate_avro_read_session(avro_schema) - avro_blocks = _bq_to_avro_blocks(SCALAR_BLOCKS, avro_schema) - - reader = class_under_test( - avro_blocks, mock_client, bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - - with pytest.raises(ImportError): - reader.to_dataframe(read_session) - - with pytest.raises(ImportError): - reader.rows(read_session).to_dataframe() - - with pytest.raises(ImportError): - next(reader.rows(read_session).pages).to_dataframe() - - -def test_to_dataframe_no_schema_set_raises_type_error( - mut, class_under_test, mock_client, monkeypatch -): - reader = class_under_test( - [], mock_client, bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - read_session = bigquery_storage_v1beta1.types.ReadSession() - - with pytest.raises(TypeError): - reader.to_dataframe(read_session) - - -def test_to_dataframe_w_scalars(class_under_test): - avro_schema = _bq_to_avro_schema(SCALAR_COLUMNS) - read_session = _generate_avro_read_session(avro_schema) - avro_blocks = _bq_to_avro_blocks(SCALAR_BLOCKS, avro_schema) - - reader = class_under_test( - avro_blocks, mock_client, 
bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - got = reader.to_dataframe(read_session) - - expected = pandas.DataFrame( - list(itertools.chain.from_iterable(SCALAR_BLOCKS)), columns=SCALAR_COLUMN_NAMES - ) - # fastavro provides its own UTC definition, so - # compare the timestamp columns separately. - got_ts = got["ts_col"] - got = got.drop(columns=["ts_col"]) - expected_ts = expected["ts_col"] - expected = expected.drop(columns=["ts_col"]) - - pandas.testing.assert_frame_equal( - got.reset_index(drop=True), # reset_index to ignore row labels - expected.reset_index(drop=True), - ) - pandas.testing.assert_series_equal( - got_ts.reset_index(drop=True), - expected_ts.reset_index(drop=True), - check_dtype=False, # fastavro's UTC means different dtype - check_datetimelike_compat=True, - ) - - -def test_to_dataframe_w_scalars_arrow(class_under_test): - arrow_schema = _bq_to_arrow_schema(SCALAR_COLUMNS) - read_session = _generate_arrow_read_session(arrow_schema) - arrow_batches = _bq_to_arrow_batches(SCALAR_BLOCKS, arrow_schema) - - reader = class_under_test( - arrow_batches, mock_client, bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - got = reader.to_dataframe(read_session) - - expected = pandas.DataFrame( - list(itertools.chain.from_iterable(SCALAR_BLOCKS)), columns=SCALAR_COLUMN_NAMES - ) - - pandas.testing.assert_frame_equal( - got.reset_index(drop=True), # reset_index to ignore row labels - expected.reset_index(drop=True), - ) - - -def test_to_dataframe_w_dtypes(class_under_test): - avro_schema = _bq_to_avro_schema( - [ - {"name": "bigfloat", "type": "float64"}, - {"name": "lilfloat", "type": "float64"}, - ] - ) - read_session = _generate_avro_read_session(avro_schema) - blocks = [ - [{"bigfloat": 1.25, "lilfloat": 30.5}, {"bigfloat": 2.5, "lilfloat": 21.125}], - [{"bigfloat": 3.75, "lilfloat": 11.0}], - ] - avro_blocks = _bq_to_avro_blocks(blocks, avro_schema) - - reader = class_under_test( - avro_blocks, mock_client, bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - got = reader.to_dataframe(read_session, dtypes={"lilfloat": "float16"}) - - expected = pandas.DataFrame( - { - "bigfloat": [1.25, 2.5, 3.75], - "lilfloat": pandas.Series([30.5, 21.125, 11.0], dtype="float16"), - }, - columns=["bigfloat", "lilfloat"], - ) - pandas.testing.assert_frame_equal( - got.reset_index(drop=True), # reset_index to ignore row labels - expected.reset_index(drop=True), - ) - - -def test_to_dataframe_w_dtypes_arrow(class_under_test): - arrow_schema = _bq_to_arrow_schema( - [ - {"name": "bigfloat", "type": "float64"}, - {"name": "lilfloat", "type": "float64"}, - ] - ) - read_session = _generate_arrow_read_session(arrow_schema) - blocks = [ - [{"bigfloat": 1.25, "lilfloat": 30.5}, {"bigfloat": 2.5, "lilfloat": 21.125}], - [{"bigfloat": 3.75, "lilfloat": 11.0}], - ] - arrow_batches = _bq_to_arrow_batches(blocks, arrow_schema) - - reader = class_under_test( - arrow_batches, mock_client, bigquery_storage_v1beta1.types.StreamPosition(), {} - ) - got = reader.to_dataframe(read_session, dtypes={"lilfloat": "float16"}) - - expected = pandas.DataFrame( - { - "bigfloat": [1.25, 2.5, 3.75], - "lilfloat": pandas.Series([30.5, 21.125, 11.0], dtype="float16"), - }, - columns=["bigfloat", "lilfloat"], - ) - pandas.testing.assert_frame_equal( - got.reset_index(drop=True), # reset_index to ignore row labels - expected.reset_index(drop=True), - ) - - -def test_to_dataframe_by_page(class_under_test, mock_client): - bq_columns = [ - {"name": "int_col", "type": "int64"}, - {"name": "bool_col", "type": 
"bool"}, - ] - avro_schema = _bq_to_avro_schema(bq_columns) - read_session = _generate_avro_read_session(avro_schema) - block_1 = [{"int_col": 123, "bool_col": True}, {"int_col": 234, "bool_col": False}] - block_2 = [{"int_col": 345, "bool_col": True}, {"int_col": 456, "bool_col": False}] - block_3 = [{"int_col": 567, "bool_col": True}, {"int_col": 789, "bool_col": False}] - block_4 = [{"int_col": 890, "bool_col": True}] - # Break blocks into two groups to test that iteration continues across - # reconnection. - bq_blocks_1 = [block_1, block_2] - bq_blocks_2 = [block_3, block_4] - avro_blocks_1 = _bq_to_avro_blocks(bq_blocks_1, avro_schema) - avro_blocks_2 = _bq_to_avro_blocks(bq_blocks_2, avro_schema) - - mock_client.read_rows.return_value = avro_blocks_2 - stream_position = bigquery_storage_v1beta1.types.StreamPosition( - stream={"name": "test"} - ) - - reader = class_under_test( - _pages_w_unavailable(avro_blocks_1), - mock_client, - stream_position, - {"metadata": {"test-key": "test-value"}}, - ) - got = reader.rows(read_session) - pages = iter(got.pages) - - page_1 = next(pages) - pandas.testing.assert_frame_equal( - page_1.to_dataframe().reset_index(drop=True), - pandas.DataFrame(block_1, columns=["int_col", "bool_col"]).reset_index( - drop=True - ), - ) - - page_2 = next(pages) - pandas.testing.assert_frame_equal( - page_2.to_dataframe().reset_index(drop=True), - pandas.DataFrame(block_2, columns=["int_col", "bool_col"]).reset_index( - drop=True - ), - ) - - page_3 = next(pages) - pandas.testing.assert_frame_equal( - page_3.to_dataframe().reset_index(drop=True), - pandas.DataFrame(block_3, columns=["int_col", "bool_col"]).reset_index( - drop=True - ), - ) - - page_4 = next(pages) - pandas.testing.assert_frame_equal( - page_4.to_dataframe().reset_index(drop=True), - pandas.DataFrame(block_4, columns=["int_col", "bool_col"]).reset_index( - drop=True - ), - ) - - -def test_to_dataframe_by_page_arrow(class_under_test, mock_client): - bq_columns = [ - {"name": "int_col", "type": "int64"}, - {"name": "bool_col", "type": "bool"}, - ] - arrow_schema = _bq_to_arrow_schema(bq_columns) - read_session = _generate_arrow_read_session(arrow_schema) - - bq_block_1 = [ - {"int_col": 123, "bool_col": True}, - {"int_col": 234, "bool_col": False}, - ] - bq_block_2 = [ - {"int_col": 345, "bool_col": True}, - {"int_col": 456, "bool_col": False}, - ] - bq_block_3 = [ - {"int_col": 567, "bool_col": True}, - {"int_col": 789, "bool_col": False}, - ] - bq_block_4 = [{"int_col": 890, "bool_col": True}] - # Break blocks into two groups to test that iteration continues across - # reconnection. 
- bq_blocks_1 = [bq_block_1, bq_block_2] - bq_blocks_2 = [bq_block_3, bq_block_4] - batch_1 = _bq_to_arrow_batches(bq_blocks_1, arrow_schema) - batch_2 = _bq_to_arrow_batches(bq_blocks_2, arrow_schema) - - mock_client.read_rows.return_value = batch_2 - - reader = class_under_test( - _pages_w_unavailable(batch_1), - mock_client, - bigquery_storage_v1beta1.types.StreamPosition(), - {}, - ) - got = reader.rows(read_session) - pages = iter(got.pages) - - page_1 = next(pages) - pandas.testing.assert_frame_equal( - page_1.to_dataframe( - dtypes={"int_col": "int64", "bool_col": "bool"} - ).reset_index(drop=True), - pandas.DataFrame(bq_block_1, columns=["int_col", "bool_col"]).reset_index( - drop=True - ), - ) - - page_2 = next(pages) - pandas.testing.assert_frame_equal( - page_2.to_dataframe().reset_index(drop=True), - pandas.DataFrame(bq_block_2, columns=["int_col", "bool_col"]).reset_index( - drop=True - ), - ) - - page_3 = next(pages) - pandas.testing.assert_frame_equal( - page_3.to_dataframe().reset_index(drop=True), - pandas.DataFrame(bq_block_3, columns=["int_col", "bool_col"]).reset_index( - drop=True - ), - ) - - page_4 = next(pages) - pandas.testing.assert_frame_equal( - page_4.to_dataframe().reset_index(drop=True), - pandas.DataFrame(bq_block_4, columns=["int_col", "bool_col"]).reset_index( - drop=True - ), - ) - - -def test_copy_stream_position(mut): - read_position = bigquery_storage_v1beta1.types.StreamPosition( - stream={"name": "test"}, offset=41 - ) - got = mut._copy_stream_position(read_position) - assert got == read_position - got.offset = 42 - assert read_position.offset == 41 - - -def test_copy_stream_position_w_dict(mut): - read_position = {"stream": {"name": "test"}, "offset": 42} - got = mut._copy_stream_position(read_position) - assert got.stream.name == "test" - assert got.offset == 42 diff --git a/dataproc/.coveragerc b/dataproc/.coveragerc deleted file mode 100644 index b178b094aa1d..000000000000 --- a/dataproc/.coveragerc +++ /dev/null @@ -1,19 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[run] -branch = True - -[report] -fail_under = 100 -show_missing = True -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file diff --git a/dataproc/.flake8 b/dataproc/.flake8 deleted file mode 100644 index 0268ecc9c55c..000000000000 --- a/dataproc/.flake8 +++ /dev/null @@ -1,14 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - *_pb2.py - - # Standard linting exemptions. 
- __pycache__, - .git, - *.pyc, - conf.py diff --git a/dataproc/.repo-metadata.json b/dataproc/.repo-metadata.json deleted file mode 100644 index c7d1f8c00a9b..000000000000 --- a/dataproc/.repo-metadata.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "dataproc", - "name_pretty": "Google Cloud Dataproc", - "product_documentation": "https://cloud.google.com/dataproc", - "client_documentation": "https://googleapis.dev/python/dataproc/latest", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559745", - "release_level": "alpha", - "language": "python", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-dataproc", - "api_id": "dataproc.googleapis.com", - "requires_billing": true -} \ No newline at end of file diff --git a/dataproc/CHANGELOG.md b/dataproc/CHANGELOG.md deleted file mode 100644 index 77f57b9cd8de..000000000000 --- a/dataproc/CHANGELOG.md +++ /dev/null @@ -1,158 +0,0 @@ -# Changelog - -[PyPI History][1] - -[1]: https://pypi.org/project/google-cloud-dataproc/#history - -## 0.6.1 - -11-12-2019 08:24 PST - -### Documentation -- Add python 2 sunset banner to documentation. ([#9036](https://github.com/googleapis/google-cloud-python/pull/9036)) - -## 0.6.0 - -11-07-2019 16:34 PST - - -### Implementation Changes -- Tweak proto annotations (via synth). ([#9466](https://github.com/googleapis/google-cloud-python/pull/9466)) -- Remove send/recv msg size limit (via synth). ([#8951](https://github.com/googleapis/google-cloud-python/pull/8951)) - -### New Features -- Add V1 autoscaling policy support; annotate protos (via synth). ([#9402](https://github.com/googleapis/google-cloud-python/pull/9402)) - -### Documentation -- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) -- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) -- Remove compatibility badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) -- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) - -## 0.5.0 - -07-24-2019 16:02 PDT - -### Implementation Changes -- Allow kwargs to be passed to create_channel (via synth). ([#8387](https://github.com/googleapis/google-cloud-python/pull/8387)) - -### New Features -- Add 'client_options' support, update list method docstrings (via synth). ([#8505](https://github.com/googleapis/google-cloud-python/pull/8505)) - -### Dependencies -- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) - -### Documentation -- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) -- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) - -### Internal / Testing Changes -- Pin black version (via synth). ([#8579](https://github.com/googleapis/google-cloud-python/pull/8579)) -- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) -- Declare encoding as utf-8 in pb2 files (via synth). ([#8349](https://github.com/googleapis/google-cloud-python/pull/8349)) -- Add disclaimer to auto-generated template files (via synth). ([#8311](https://github.com/googleapis/google-cloud-python/pull/8311)) -- Supress checking 'cov-fail-under' in nox default session (via synth). 
([#8237](https://github.com/googleapis/google-cloud-python/pull/8237)) - -## 0.4.0 - -05-30-2019 05:52 PDT - -### Implementation Changes -- Update docs/conf.py, add routing header to method metadata, fix docstrings (via synth). ([#7924](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7924)) - -### New Features -- Add new service features for v1, including autoscaling (via synth). ([#8152](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8152)) -- Add new service features for v1beta2, including autoscaling (via synth). ([#8119](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8119)) - -### Documentation -- Add nox session `docs` ([#7429](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7429)) -- Add clarifying comment to blacken nox target. ([#7388](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7388)) - -### Internal / Testing Changes -- Re-add import of 'operations.proto' to V1 'clusters.proto' (via synth). ([#8188](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8188)) -- Add empty lines (via synth). ([#8054](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8054)) -- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7535)) -- Copy lintified proto files (via synth). ([#7465](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7465)) - -## 0.3.1 - -02-15-2019 12:36 PST - - -### Implementation Changes -- Remove unused message exports. ([#7266](https://github.com/googleapis/google-cloud-python/pull/7266)) -- Protoc-generated serialization update.. ([#7079](https://github.com/googleapis/google-cloud-python/pull/7079)) -- Trivial housekeeping change to .proto files. ([#7067](https://github.com/googleapis/google-cloud-python/pull/7067)) - -### Documentation -- Updated client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) -- Pick up stub docstring fix in GAPIC generator. ([#6967](https://github.com/googleapis/google-cloud-python/pull/6967)) - -### Internal / Testing Changes -- Copy proto files alongside protoc versions. -- Add protos as an artifact to library ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) -- Update copyright headers - -## 0.3.0 - -12-17-2018 18:20 PST - - -### Implementation Changes -- Import `iam.policy` from `google.api_core`. ([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) -- Update `cluster_controller_client` GAPIC config (via synth). ([#6659](https://github.com/googleapis/google-cloud-python/pull/6659)) -- Add 'WorkflowTemplateServiceClient', optional args; update timeouts (via synth). ([#6655](https://github.com/googleapis/google-cloud-python/pull/6655)) -- Pick up enum fixes in the GAPIC generator. ([#6609](https://github.com/googleapis/google-cloud-python/pull/6609)) -- Pick up fixes in GAPIC generator. ([#6493](https://github.com/googleapis/google-cloud-python/pull/6493)) -- Fix client_info bug, update docstrings. 
([#6408](https://github.com/googleapis/google-cloud-python/pull/6408)) -- Re-generate library using dataproc/synth.py ([#6056](https://github.com/googleapis/google-cloud-python/pull/6056)) -- Re-generate library using dataproc/synth.py ([#5975](https://github.com/googleapis/google-cloud-python/pull/5975)) -- Re-generate library using dataproc/synth.py ([#5949](https://github.com/googleapis/google-cloud-python/pull/5949)) - -### Dependencies -- Bump minimum `api_core` version for all GAPIC libs to 1.4.1. ([#6391](https://github.com/googleapis/google-cloud-python/pull/6391)) - -### Documentation -- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) -- Update Dataproc docs URL ([#6455](https://github.com/googleapis/google-cloud-python/pull/6455)) -- Docs: fix GAX fossils ([#6264](https://github.com/googleapis/google-cloud-python/pull/6264)) -- Docs: normalize use of support level badges ([#6159](https://github.com/googleapis/google-cloud-python/pull/6159)) -- Dataproc: harmonize / DRY 'README.rst' / 'docs/index.rst'. ([#6019](https://github.com/googleapis/google-cloud-python/pull/6019)) - -### Internal / Testing Changes -- Update noxfile. -- Blacken all gen'd libs ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) -- Omit local deps ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) -- Run black at end of synth.py ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) -- Unblack dataproc gapic and protos. -- Run Black on Generated libraries ([#6666](https://github.com/googleapis/google-cloud-python/pull/6666)) -- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) -- Add synth metadata. ([#6563](https://github.com/googleapis/google-cloud-python/pull/6563)) -- Use new Nox ([#6175](https://github.com/googleapis/google-cloud-python/pull/6175)) - -## 0.2.0 - -### New Features -- Regenerate v1 endpoint. Add v1beta2 endpoint (#5717) - -## 0.1.2 - -### Implementation Changes -- Avoid overwriting '__module__' of messages from shared modules. (#5364) - -### Internal / Testing Changes -- Modify system tests to use prerelease versions of grpcio (#5304) -- Add Test runs for Python 3.7 and remove 3.4 (#5295) -- Re-enable lint for tests, remove usage of pylint (#4921) - -## 0.1.1 - -### Dependencies - -- Update dependency range for api-core to include v1.0.0 releases (#4944) - -### Testing and internal changes - -- Re-enable lint for tests, remove usage of pylint (#4921) -- Normalize all setup.py files (#4909) - diff --git a/dataproc/LICENSE b/dataproc/LICENSE deleted file mode 100644 index a8ee855de2aa..000000000000 --- a/dataproc/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - https://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/dataproc/MANIFEST.in b/dataproc/MANIFEST.in deleted file mode 100644 index 9cbf175afe6b..000000000000 --- a/dataproc/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include README.rst LICENSE -recursive-include google *.json *.proto -recursive-include tests * -global-exclude *.py[co] -global-exclude __pycache__ diff --git a/dataproc/README.rst b/dataproc/README.rst deleted file mode 100644 index c06fe058630e..000000000000 --- a/dataproc/README.rst +++ /dev/null @@ -1,110 +0,0 @@ -Python Client for Google Cloud Dataproc API -=========================================== - -|alpha| |pypi| |versions| - -`Google Cloud Dataproc API`_: Manages Hadoop-based clusters and jobs on Google Cloud Platform. - -- `Client Library Documentation`_ -- `Product Documentation`_ - -.. |alpha| image:: https://img.shields.io/badge/support-alpha-orange.svg - :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#alpha-support -.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-dataproc.svg - :target: https://pypi.org/project/google-cloud-dataproc/ -.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-dataproc.svg - :target: https://pypi.org/project/google-cloud-dataproc/ -.. _Google Cloud Dataproc API: https://cloud.google.com/dataproc -.. _Client Library Documentation: https://googleapis.dev/python/dataproc/latest -.. _Product Documentation: https://cloud.google.com/dataproc - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. `Enable the Google Cloud Dataproc API.`_ -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the Google Cloud Dataproc API.: https://cloud.google.com/dataproc -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Supported Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.5 - -Deprecated Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv - source /bin/activate - /bin/pip install google-cloud-dataproc - - -Windows -^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-dataproc - -Example Usage -~~~~~~~~~~~~~ - -.. 
code:: py - - from google.cloud import dataproc_v1 - - client = dataproc_v1.ClusterControllerClient() - - project_id = '' - region = '' - - - # Iterate over all results - for element in client.list_clusters(project_id, region): - # process element - pass - - # Or iterate over results one page at a time - for page in client.list_clusters(project_id, region).pages: - for element in page: - # process element - pass - -Next Steps -~~~~~~~~~~ - -- Read the `Client Library Documentation`_ for Google Cloud Dataproc API - API to see other available methods on the client. -- Read the `Product documentation`_ to learn more about the product and see - How-to Guides. diff --git a/dataproc/docs/README.rst b/dataproc/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/dataproc/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/dataproc/docs/_static/custom.css b/dataproc/docs/_static/custom.css deleted file mode 100644 index 0abaf229fce3..000000000000 --- a/dataproc/docs/_static/custom.css +++ /dev/null @@ -1,4 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} \ No newline at end of file diff --git a/dataproc/docs/_templates/layout.html b/dataproc/docs/_templates/layout.html deleted file mode 100644 index 228529efe2d2..000000000000 --- a/dataproc/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@ - -{% extends "!layout.html" %} -{%- block content %} -{%- if theme_fixed_sidebar|lower == 'true' %} -
- {{ sidebar() }} - {%- block document %} -
- {%- if render_sidebar %} -
- {%- endif %} - - {%- block relbar_top %} - {%- if theme_show_relbar_top|tobool %} - - {%- endif %} - {% endblock %} - -
-
- On January 1, 2020, this library will no longer support Python 2 on the latest released version. Previously released library versions will continue to be available. For more information, please visit Python 2 support on Google Cloud.
- {% block body %} {% endblock %} -
- - {%- block relbar_bottom %} - {%- if theme_show_relbar_bottom|tobool %} - - {%- endif %} - {% endblock %} - - {%- if render_sidebar %} -
- {%- endif %} -
- {%- endblock %} -
-
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/dataproc/docs/changelog.md b/dataproc/docs/changelog.md deleted file mode 120000 index 04c99a55caae..000000000000 --- a/dataproc/docs/changelog.md +++ /dev/null @@ -1 +0,0 @@ -../CHANGELOG.md \ No newline at end of file diff --git a/dataproc/docs/conf.py b/dataproc/docs/conf.py deleted file mode 100644 index f4cff2d02d3a..000000000000 --- a/dataproc/docs/conf.py +++ /dev/null @@ -1,363 +0,0 @@ -# -*- coding: utf-8 -*- -# -# google-cloud-dataproc documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-dataproc" -copyright = u"2017, Google" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. 
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. 
-# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-dataproc-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - #'preamble': '', - # Latex figure (float) alignment - #'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-dataproc.tex", - u"google-cloud-dataproc Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-dataproc", - u"google-cloud-dataproc Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. 
-# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-dataproc", - u"google-cloud-dataproc Documentation", - author, - "google-cloud-dataproc", - "GAPIC library for the {metadata.shortName} v1 service", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/master/", None), - "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), - "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/dataproc/docs/gapic/v1/api.rst b/dataproc/docs/gapic/v1/api.rst deleted file mode 100644 index 633bbed170ea..000000000000 --- a/dataproc/docs/gapic/v1/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Google Cloud Dataproc API -==================================== - -.. automodule:: google.cloud.dataproc_v1 - :members: - :inherited-members: \ No newline at end of file diff --git a/dataproc/docs/gapic/v1/types.rst b/dataproc/docs/gapic/v1/types.rst deleted file mode 100644 index 0dfeadcfbaa7..000000000000 --- a/dataproc/docs/gapic/v1/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Google Cloud Dataproc API Client -========================================== - -.. automodule:: google.cloud.dataproc_v1.types - :members: \ No newline at end of file diff --git a/dataproc/docs/gapic/v1beta2/api.rst b/dataproc/docs/gapic/v1beta2/api.rst deleted file mode 100644 index bac320b8e069..000000000000 --- a/dataproc/docs/gapic/v1beta2/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Google Cloud Dataproc API -==================================== - -.. automodule:: google.cloud.dataproc_v1beta2 - :members: - :inherited-members: \ No newline at end of file diff --git a/dataproc/docs/gapic/v1beta2/types.rst b/dataproc/docs/gapic/v1beta2/types.rst deleted file mode 100644 index 5bed9e2cd25c..000000000000 --- a/dataproc/docs/gapic/v1beta2/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Google Cloud Dataproc API Client -========================================== - -.. 
automodule:: google.cloud.dataproc_v1beta2.types - :members: \ No newline at end of file diff --git a/dataproc/docs/index.rst b/dataproc/docs/index.rst deleted file mode 100644 index d4dd5e345115..000000000000 --- a/dataproc/docs/index.rst +++ /dev/null @@ -1,22 +0,0 @@ -.. include:: README.rst - -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - gapic/v1/api - gapic/v1/types - gapic/v1beta2/api - gapic/v1beta2/types - - -Changelog ---------- - -For a list of all ``google-cloud-dataproc`` releases: - -.. toctree:: - :maxdepth: 2 - - changelog diff --git a/dataproc/google/__init__.py b/dataproc/google/__init__.py deleted file mode 100644 index 9a1b64a6d586..000000000000 --- a/dataproc/google/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/dataproc/google/cloud/__init__.py b/dataproc/google/cloud/__init__.py deleted file mode 100644 index 9a1b64a6d586..000000000000 --- a/dataproc/google/cloud/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/dataproc/google/cloud/dataproc.py b/dataproc/google/cloud/dataproc.py deleted file mode 100644 index 25b767956a0c..000000000000 --- a/dataproc/google/cloud/dataproc.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from __future__ import absolute_import - -from google.cloud.dataproc_v1 import ClusterControllerClient -from google.cloud.dataproc_v1 import JobControllerClient -from google.cloud.dataproc_v1 import WorkflowTemplateServiceClient -from google.cloud.dataproc_v1 import enums -from google.cloud.dataproc_v1 import types - - -__all__ = ( - "enums", - "types", - "ClusterControllerClient", - "JobControllerClient", - "WorkflowTemplateServiceClient", -) diff --git a/dataproc/google/cloud/dataproc_v1/__init__.py b/dataproc/google/cloud/dataproc_v1/__init__.py deleted file mode 100644 index 395e618f20f9..000000000000 --- a/dataproc/google/cloud/dataproc_v1/__init__.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import -import sys -import warnings - -from google.cloud.dataproc_v1 import types -from google.cloud.dataproc_v1.gapic import cluster_controller_client -from google.cloud.dataproc_v1.gapic import enums -from google.cloud.dataproc_v1.gapic import job_controller_client -from google.cloud.dataproc_v1.gapic import workflow_template_service_client - - -if sys.version_info[:2] == (2, 7): - message = ( - "A future version of this library will drop support for Python 2.7." - "More details about Python 2 support for Google Cloud Client Libraries" - "can be found at https://cloud.google.com/python/docs/python2-sunset/" - ) - warnings.warn(message, DeprecationWarning) - - -class ClusterControllerClient(cluster_controller_client.ClusterControllerClient): - __doc__ = cluster_controller_client.ClusterControllerClient.__doc__ - enums = enums - - -class JobControllerClient(job_controller_client.JobControllerClient): - __doc__ = job_controller_client.JobControllerClient.__doc__ - enums = enums - - -class WorkflowTemplateServiceClient( - workflow_template_service_client.WorkflowTemplateServiceClient -): - __doc__ = workflow_template_service_client.WorkflowTemplateServiceClient.__doc__ - enums = enums - - -__all__ = ( - "enums", - "types", - "ClusterControllerClient", - "JobControllerClient", - "WorkflowTemplateServiceClient", -) diff --git a/dataproc/google/cloud/dataproc_v1/gapic/__init__.py b/dataproc/google/cloud/dataproc_v1/gapic/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client.py b/dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client.py deleted file mode 100644 index f849ff06dff5..000000000000 --- a/dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client.py +++ /dev/null @@ -1,869 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.cloud.dataproc.v1 ClusterController API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.grpc_helpers -import google.api_core.operation -import google.api_core.operations_v1 -import google.api_core.page_iterator -import grpc - -from google.cloud.dataproc_v1.gapic import cluster_controller_client_config -from google.cloud.dataproc_v1.gapic import enums -from google.cloud.dataproc_v1.gapic.transports import cluster_controller_grpc_transport -from google.cloud.dataproc_v1.proto import clusters_pb2 -from google.cloud.dataproc_v1.proto import clusters_pb2_grpc -from google.cloud.dataproc_v1.proto import operations_pb2 as proto_operations_pb2 -from google.longrunning import operations_pb2 as longrunning_operations_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version - - -class ClusterControllerClient(object): - """ - The ClusterControllerService provides methods to manage clusters - of Compute Engine instances. - """ - - SERVICE_ADDRESS = "dataproc.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.dataproc.v1.ClusterController" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ClusterControllerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.ClusterControllerGrpcTransport, - Callable[[~.Credentials, type], ~.ClusterControllerGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. 
- credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = cluster_controller_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=cluster_controller_grpc_transport.ClusterControllerGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = cluster_controller_grpc_transport.ClusterControllerGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_cluster( - self, - project_id, - region, - cluster, - request_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a cluster in a project. The returned ``Operation.metadata`` will - be - `ClusterOperationMetadata `__. 
- - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.ClusterControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `cluster`: - >>> cluster = {} - >>> - >>> response = client.create_cluster(project_id, region, cluster) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - region (str): Required. The Dataproc region in which to handle the request. - cluster (Union[dict, ~google.cloud.dataproc_v1.types.Cluster]): Required. The cluster to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1.types.Cluster` - request_id (str): Optional. A unique id used to identify the request. If the server - receives two ``CreateClusterRequest`` requests with the same id, then - the second request will be ignored and the first - ``google.longrunning.Operation`` created and stored in the backend is - returned. - - It is recommended to always set this value to a - `UUID `__. - - The id must contain only letters (a-z, A-Z), numbers (0-9), underscores - (\_), and hyphens (-). The maximum length is 40 characters. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_cluster" not in self._inner_api_calls: - self._inner_api_calls[ - "create_cluster" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_cluster, - default_retry=self._method_configs["CreateCluster"].retry, - default_timeout=self._method_configs["CreateCluster"].timeout, - client_info=self._client_info, - ) - - request = clusters_pb2.CreateClusterRequest( - project_id=project_id, region=region, cluster=cluster, request_id=request_id - ) - operation = self._inner_api_calls["create_cluster"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - clusters_pb2.Cluster, - metadata_type=proto_operations_pb2.ClusterOperationMetadata, - ) - - def update_cluster( - self, - project_id, - region, - cluster_name, - cluster, - update_mask, - graceful_decommission_timeout=None, - request_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a cluster in a project. 
The returned ``Operation.metadata`` will - be - `ClusterOperationMetadata `__. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.ClusterControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `cluster_name`: - >>> cluster_name = '' - >>> - >>> # TODO: Initialize `cluster`: - >>> cluster = {} - >>> - >>> # TODO: Initialize `update_mask`: - >>> update_mask = {} - >>> - >>> response = client.update_cluster(project_id, region, cluster_name, cluster, update_mask) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project the - cluster belongs to. - region (str): Required. The Dataproc region in which to handle the request. - cluster_name (str): Required. The cluster name. - cluster (Union[dict, ~google.cloud.dataproc_v1.types.Cluster]): Required. The changes to the cluster. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1.types.Cluster` - update_mask (Union[dict, ~google.cloud.dataproc_v1.types.FieldMask]): Required. Specifies the path, relative to ``Cluster``, of the field to - update. For example, to change the number of workers in a cluster to 5, - the ``update_mask`` parameter would be specified as - ``config.worker_config.num_instances``, and the ``PATCH`` request body - would specify the new value, as follows: - - :: - - { - "config":{ - "workerConfig":{ - "numInstances":"5" - } - } - } - - Similarly, to change the number of preemptible workers in a cluster to - 5, the ``update_mask`` parameter would be - ``config.secondary_worker_config.num_instances``, and the ``PATCH`` - request body would be set as follows: - - :: - - { - "config":{ - "secondaryWorkerConfig":{ - "numInstances":"5" - } - } - } - - Note: Currently, only the following fields can be updated: - - .. raw:: html - - - - - - - - - - - - - - - - - - - - - - - -
Mask                                            Purpose
labels                                          Update labels
config.worker_config.num_instances              Resize primary worker group
config.secondary_worker_config.num_instances    Resize secondary worker group
config.autoscaling_config.policy_uri            Use, stop using, or change autoscaling policies
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1.types.FieldMask` - graceful_decommission_timeout (Union[dict, ~google.cloud.dataproc_v1.types.Duration]): Optional. Timeout for graceful YARN decomissioning. Graceful - decommissioning allows removing nodes from the cluster without - interrupting jobs in progress. Timeout specifies how long to wait for jobs - in progress to finish before forcefully removing nodes (and potentially - interrupting jobs). Default timeout is 0 (for forceful decommission), and - the maximum allowed timeout is 1 day. - - Only supported on Dataproc image versions 1.2 and higher. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1.types.Duration` - request_id (str): Optional. A unique id used to identify the request. If the server - receives two ``UpdateClusterRequest`` requests with the same id, then - the second request will be ignored and the first - ``google.longrunning.Operation`` created and stored in the backend is - returned. - - It is recommended to always set this value to a - `UUID `__. - - The id must contain only letters (a-z, A-Z), numbers (0-9), underscores - (\_), and hyphens (-). The maximum length is 40 characters. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_cluster" not in self._inner_api_calls: - self._inner_api_calls[ - "update_cluster" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_cluster, - default_retry=self._method_configs["UpdateCluster"].retry, - default_timeout=self._method_configs["UpdateCluster"].timeout, - client_info=self._client_info, - ) - - request = clusters_pb2.UpdateClusterRequest( - project_id=project_id, - region=region, - cluster_name=cluster_name, - cluster=cluster, - update_mask=update_mask, - graceful_decommission_timeout=graceful_decommission_timeout, - request_id=request_id, - ) - operation = self._inner_api_calls["update_cluster"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - clusters_pb2.Cluster, - metadata_type=proto_operations_pb2.ClusterOperationMetadata, - ) - - def delete_cluster( - self, - project_id, - region, - cluster_name, - cluster_uuid=None, - request_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a cluster in a project. The returned ``Operation.metadata`` will - be - `ClusterOperationMetadata `__. 
- - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.ClusterControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `cluster_name`: - >>> cluster_name = '' - >>> - >>> response = client.delete_cluster(project_id, region, cluster_name) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - region (str): Required. The Dataproc region in which to handle the request. - cluster_name (str): Required. The cluster name. - cluster_uuid (str): Optional. Specifying the ``cluster_uuid`` means the RPC should fail - (with error NOT\_FOUND) if cluster with specified UUID does not exist. - request_id (str): Optional. A unique id used to identify the request. If the server - receives two ``DeleteClusterRequest`` requests with the same id, then - the second request will be ignored and the first - ``google.longrunning.Operation`` created and stored in the backend is - returned. - - It is recommended to always set this value to a - `UUID `__. - - The id must contain only letters (a-z, A-Z), numbers (0-9), underscores - (\_), and hyphens (-). The maximum length is 40 characters. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_cluster" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_cluster" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_cluster, - default_retry=self._method_configs["DeleteCluster"].retry, - default_timeout=self._method_configs["DeleteCluster"].timeout, - client_info=self._client_info, - ) - - request = clusters_pb2.DeleteClusterRequest( - project_id=project_id, - region=region, - cluster_name=cluster_name, - cluster_uuid=cluster_uuid, - request_id=request_id, - ) - operation = self._inner_api_calls["delete_cluster"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=proto_operations_pb2.ClusterOperationMetadata, - ) - - def get_cluster( - self, - project_id, - region, - cluster_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the resource representation for a cluster in a project. 
- - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.ClusterControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `cluster_name`: - >>> cluster_name = '' - >>> - >>> response = client.get_cluster(project_id, region, cluster_name) - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - region (str): Required. The Dataproc region in which to handle the request. - cluster_name (str): Required. The cluster name. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1.types.Cluster` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_cluster" not in self._inner_api_calls: - self._inner_api_calls[ - "get_cluster" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_cluster, - default_retry=self._method_configs["GetCluster"].retry, - default_timeout=self._method_configs["GetCluster"].timeout, - client_info=self._client_info, - ) - - request = clusters_pb2.GetClusterRequest( - project_id=project_id, region=region, cluster_name=cluster_name - ) - return self._inner_api_calls["get_cluster"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_clusters( - self, - project_id, - region, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists all regions/{region}/clusters in a project. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.ClusterControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # Iterate over all results - >>> for element in client.list_clusters(project_id, region): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_clusters(project_id, region).pages: - ... for element in page: - ... # process element - ... pass - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - region (str): Required. The Dataproc region in which to handle the request. - filter_ (str): Optional. A filter constraining the clusters to list. Filters are - case-sensitive and have the following syntax: - - field = value [AND [field = value]] ... - - where **field** is one of ``status.state``, ``clusterName``, or - ``labels.[KEY]``, and ``[KEY]`` is a label key. **value** can be ``*`` - to match all values. 
``status.state`` can be one of the following: - ``ACTIVE``, ``INACTIVE``, ``CREATING``, ``RUNNING``, ``ERROR``, - ``DELETING``, or ``UPDATING``. ``ACTIVE`` contains the ``CREATING``, - ``UPDATING``, and ``RUNNING`` states. ``INACTIVE`` contains the - ``DELETING`` and ``ERROR`` states. ``clusterName`` is the name of the - cluster provided at creation time. Only the logical ``AND`` operator is - supported; space-separated items are treated as having an implicit - ``AND`` operator. - - Example filter: - - status.state = ACTIVE AND clusterName = mycluster AND labels.env = - staging AND labels.starred = \* - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.dataproc_v1.types.Cluster` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_clusters" not in self._inner_api_calls: - self._inner_api_calls[ - "list_clusters" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_clusters, - default_retry=self._method_configs["ListClusters"].retry, - default_timeout=self._method_configs["ListClusters"].timeout, - client_info=self._client_info, - ) - - request = clusters_pb2.ListClustersRequest( - project_id=project_id, region=region, filter=filter_, page_size=page_size - ) - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_clusters"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="clusters", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def diagnose_cluster( - self, - project_id, - region, - cluster_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets cluster diagnostic information. The returned ``Operation.metadata`` - will be - `ClusterOperationMetadata `__. - After the operation completes, ``Operation.response`` contains - `DiagnoseClusterResults `__. 
- - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.ClusterControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `cluster_name`: - >>> cluster_name = '' - >>> - >>> response = client.diagnose_cluster(project_id, region, cluster_name) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - region (str): Required. The Dataproc region in which to handle the request. - cluster_name (str): Required. The cluster name. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "diagnose_cluster" not in self._inner_api_calls: - self._inner_api_calls[ - "diagnose_cluster" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.diagnose_cluster, - default_retry=self._method_configs["DiagnoseCluster"].retry, - default_timeout=self._method_configs["DiagnoseCluster"].timeout, - client_info=self._client_info, - ) - - request = clusters_pb2.DiagnoseClusterRequest( - project_id=project_id, region=region, cluster_name=cluster_name - ) - operation = self._inner_api_calls["diagnose_cluster"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=clusters_pb2.DiagnoseClusterResults, - ) diff --git a/dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client_config.py b/dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client_config.py deleted file mode 100644 index 02c0cb49d099..000000000000 --- a/dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client_config.py +++ /dev/null @@ -1,53 +0,0 @@ -config = { - "interfaces": { - "google.cloud.dataproc.v1.ClusterController": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": ["UNAVAILABLE"], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 10000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 10000, - "total_timeout_millis": 300000, - } - }, - "methods": { - "CreateCluster": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateCluster": { - "timeout_millis": 30000, - 
"retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DeleteCluster": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetCluster": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListClusters": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DiagnoseCluster": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/dataproc/google/cloud/dataproc_v1/gapic/enums.py b/dataproc/google/cloud/dataproc_v1/gapic/enums.py deleted file mode 100644 index 9bbaf2a63b3c..000000000000 --- a/dataproc/google/cloud/dataproc_v1/gapic/enums.py +++ /dev/null @@ -1,277 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class Component(enum.IntEnum): - """ - Cluster components that can be activated. - - Attributes: - COMPONENT_UNSPECIFIED (int): Unspecified component. - ANACONDA (int): The Anaconda python distribution. - HIVE_WEBHCAT (int): The Hive Web HCatalog (the REST service for accessing HCatalog). - JUPYTER (int): The Jupyter Notebook. - ZEPPELIN (int): The Zeppelin notebook. - """ - - COMPONENT_UNSPECIFIED = 0 - ANACONDA = 5 - HIVE_WEBHCAT = 3 - JUPYTER = 1 - ZEPPELIN = 4 - - -class ClusterOperationStatus(object): - class State(enum.IntEnum): - """ - The operation state. - - Attributes: - UNKNOWN (int): Unused. - PENDING (int): The operation has been created. - RUNNING (int): The operation is running. - DONE (int): The operation is done; either cancelled or completed. - """ - - UNKNOWN = 0 - PENDING = 1 - RUNNING = 2 - DONE = 3 - - -class ClusterStatus(object): - class State(enum.IntEnum): - """ - The cluster state. - - Attributes: - UNKNOWN (int): The cluster state is unknown. - CREATING (int): The cluster is being created and set up. It is not ready for use. - RUNNING (int): The cluster is currently running and healthy. It is ready for use. - ERROR (int): The cluster encountered an error. It is not ready for use. - DELETING (int): The cluster is being deleted. It cannot be used. - UPDATING (int): The cluster is being updated. It continues to accept and process jobs. - """ - - UNKNOWN = 0 - CREATING = 1 - RUNNING = 2 - ERROR = 3 - DELETING = 4 - UPDATING = 5 - - class Substate(enum.IntEnum): - """ - The cluster substate. - - Attributes: - UNSPECIFIED (int): The cluster substate is unknown. - UNHEALTHY (int): The cluster is known to be in an unhealthy state - (for example, critical daemons are not running or HDFS capacity is - exhausted). - - Applies to RUNNING state. - STALE_STATUS (int): The agent-reported status is out of date (may occur if - Dataproc loses communication with Agent). - - Applies to RUNNING state. 
- """ - - UNSPECIFIED = 0 - UNHEALTHY = 1 - STALE_STATUS = 2 - - -class JobStatus(object): - class State(enum.IntEnum): - """ - The job state. - - Attributes: - STATE_UNSPECIFIED (int): The job state is unknown. - PENDING (int): The job is pending; it has been submitted, but is not yet running. - SETUP_DONE (int): Job has been received by the service and completed initial setup; - it will soon be submitted to the cluster. - RUNNING (int): The job is running on the cluster. - CANCEL_PENDING (int): A CancelJob request has been received, but is pending. - CANCEL_STARTED (int): Transient in-flight resources have been canceled, and the request to - cancel the running job has been issued to the cluster. - CANCELLED (int): The job cancellation was successful. - DONE (int): The job has completed successfully. - ERROR (int): The job has completed, but encountered an error. - ATTEMPT_FAILURE (int): Job attempt has failed. The detail field contains failure details for - this attempt. - - Applies to restartable jobs only. - """ - - STATE_UNSPECIFIED = 0 - PENDING = 1 - SETUP_DONE = 8 - RUNNING = 2 - CANCEL_PENDING = 3 - CANCEL_STARTED = 7 - CANCELLED = 4 - DONE = 5 - ERROR = 6 - ATTEMPT_FAILURE = 9 - - class Substate(enum.IntEnum): - """ - The job substate. - - Attributes: - UNSPECIFIED (int): The job substate is unknown. - SUBMITTED (int): The Job is submitted to the agent. - - Applies to RUNNING state. - QUEUED (int): The Job has been received and is awaiting execution (it may be waiting - for a condition to be met). See the "details" field for the reason for - the delay. - - Applies to RUNNING state. - STALE_STATUS (int): The agent-reported status is out of date, which may be caused by a - loss of communication between the agent and Dataproc. If the - agent does not send a timely update, the job will fail. - - Applies to RUNNING state. - """ - - UNSPECIFIED = 0 - SUBMITTED = 1 - QUEUED = 2 - STALE_STATUS = 3 - - -class ListJobsRequest(object): - class JobStateMatcher(enum.IntEnum): - """ - A matcher that specifies categories of job states. - - Attributes: - ALL (int): Match all jobs, regardless of state. - ACTIVE (int): Only match jobs in non-terminal states: PENDING, RUNNING, or - CANCEL\_PENDING. - NON_ACTIVE (int): Only match jobs in terminal states: CANCELLED, DONE, or ERROR. - """ - - ALL = 0 - ACTIVE = 1 - NON_ACTIVE = 2 - - -class LoggingConfig(object): - class Level(enum.IntEnum): - """ - The Log4j level for job execution. When running an `Apache - Hive `__ job, Cloud Dataproc configures the - Hive client to an equivalent verbosity level. - - Attributes: - LEVEL_UNSPECIFIED (int): Level is unspecified. Use default level for log4j. - ALL (int): Use ALL level for log4j. - TRACE (int): Use TRACE level for log4j. - DEBUG (int): Use DEBUG level for log4j. - INFO (int): Use INFO level for log4j. - WARN (int): Use WARN level for log4j. - ERROR (int): Use ERROR level for log4j. - FATAL (int): Use FATAL level for log4j. - OFF (int): Turn off log4j. - """ - - LEVEL_UNSPECIFIED = 0 - ALL = 1 - TRACE = 2 - DEBUG = 3 - INFO = 4 - WARN = 5 - ERROR = 6 - FATAL = 7 - OFF = 8 - - -class WorkflowMetadata(object): - class State(enum.IntEnum): - """ - The operation state. - - Attributes: - UNKNOWN (int): Unused. - PENDING (int): The operation has been created. - RUNNING (int): The operation is running. - DONE (int): The operation is done; either cancelled or completed. 
- """ - - UNKNOWN = 0 - PENDING = 1 - RUNNING = 2 - DONE = 3 - - -class WorkflowNode(object): - class NodeState(enum.IntEnum): - """ - The workflow node state. - - Attributes: - NODE_STATE_UNSPECIFIED (int): State is unspecified. - BLOCKED (int): The node is awaiting prerequisite node to finish. - RUNNABLE (int): The node is runnable but not running. - RUNNING (int): The node is running. - COMPLETED (int): The node completed successfully. - FAILED (int): The node failed. A node can be marked FAILED because - its ancestor or peer failed. - """ - - NODE_STATE_UNSPECIFIED = 0 - BLOCKED = 1 - RUNNABLE = 2 - RUNNING = 3 - COMPLETED = 4 - FAILED = 5 - - -class YarnApplication(object): - class State(enum.IntEnum): - """ - The application state, corresponding to - YarnProtos.YarnApplicationStateProto. - - Attributes: - STATE_UNSPECIFIED (int): Status is unspecified. - NEW (int): Status is NEW. - NEW_SAVING (int): Status is NEW\_SAVING. - SUBMITTED (int): Status is SUBMITTED. - ACCEPTED (int): Status is ACCEPTED. - RUNNING (int): Status is RUNNING. - FINISHED (int): Status is FINISHED. - FAILED (int): Status is FAILED. - KILLED (int): Status is KILLED. - """ - - STATE_UNSPECIFIED = 0 - NEW = 1 - NEW_SAVING = 2 - SUBMITTED = 3 - ACCEPTED = 4 - RUNNING = 5 - FINISHED = 6 - FAILED = 7 - KILLED = 8 diff --git a/dataproc/google/cloud/dataproc_v1/gapic/job_controller_client.py b/dataproc/google/cloud/dataproc_v1/gapic/job_controller_client.py deleted file mode 100644 index 7752a4b79235..000000000000 --- a/dataproc/google/cloud/dataproc_v1/gapic/job_controller_client.py +++ /dev/null @@ -1,705 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.cloud.dataproc.v1 JobController API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import grpc - -from google.cloud.dataproc_v1.gapic import enums -from google.cloud.dataproc_v1.gapic import job_controller_client_config -from google.cloud.dataproc_v1.gapic.transports import job_controller_grpc_transport -from google.cloud.dataproc_v1.proto import clusters_pb2 -from google.cloud.dataproc_v1.proto import clusters_pb2_grpc -from google.cloud.dataproc_v1.proto import jobs_pb2 -from google.cloud.dataproc_v1.proto import jobs_pb2_grpc -from google.cloud.dataproc_v1.proto import operations_pb2 as proto_operations_pb2 -from google.longrunning import operations_pb2 as longrunning_operations_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version - - -class JobControllerClient(object): - """The JobController provides methods to manage jobs.""" - - SERVICE_ADDRESS = "dataproc.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.dataproc.v1.JobController" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - JobControllerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.JobControllerGrpcTransport, - Callable[[~.Credentials, type], ~.JobControllerGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. 
A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = job_controller_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=job_controller_grpc_transport.JobControllerGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = job_controller_grpc_transport.JobControllerGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def submit_job( - self, - project_id, - region, - job, - request_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Submits a job to a cluster. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.JobControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `job`: - >>> job = {} - >>> - >>> response = client.submit_job(project_id, region, job) - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the job - belongs to. - region (str): Required. The Dataproc region in which to handle the request. 
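A sketch of submitting a job as a plain dict, as the removed docstring permits; the ``placement``/``pyspark_job`` shape of the ``Job`` message is assumed here, and the project, cluster, and bucket names are placeholders:

    from google.cloud import dataproc_v1

    client = dataproc_v1.JobControllerClient()

    # A dict is accepted wherever a Job protobuf message is expected.
    job = {
        'placement': {'cluster_name': 'my-cluster'},
        'pyspark_job': {'main_python_file_uri': 'gs://my-bucket/wordcount.py'},
    }
    submitted = client.submit_job('my-project', 'global', job)
    print(submitted.reference.job_id)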
- job (Union[dict, ~google.cloud.dataproc_v1.types.Job]): Required. The job resource. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1.types.Job` - request_id (str): Optional. A unique id used to identify the request. If the server - receives two ``SubmitJobRequest`` requests with the same id, then the - second request will be ignored and the first ``Job`` created and stored - in the backend is returned. - - It is recommended to always set this value to a - `UUID `__. - - The id must contain only letters (a-z, A-Z), numbers (0-9), underscores - (\_), and hyphens (-). The maximum length is 40 characters. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1.types.Job` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "submit_job" not in self._inner_api_calls: - self._inner_api_calls[ - "submit_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.submit_job, - default_retry=self._method_configs["SubmitJob"].retry, - default_timeout=self._method_configs["SubmitJob"].timeout, - client_info=self._client_info, - ) - - request = jobs_pb2.SubmitJobRequest( - project_id=project_id, region=region, job=job, request_id=request_id - ) - return self._inner_api_calls["submit_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_job( - self, - project_id, - region, - job_id, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the resource representation for a job in a project. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.JobControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `job_id`: - >>> job_id = '' - >>> - >>> response = client.get_job(project_id, region, job_id) - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the job - belongs to. - region (str): Required. The Dataproc region in which to handle the request. - job_id (str): Required. The job ID. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1.types.Job` instance. 
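``submit_job`` returns immediately, so callers usually polled ``get_job`` until a terminal ``JobStatus.State`` was reached; a minimal sketch with placeholder identifiers and a fixed poll interval:

    import time

    from google.cloud import dataproc_v1
    from google.cloud.dataproc_v1.gapic import enums

    client = dataproc_v1.JobControllerClient()

    TERMINAL = (enums.JobStatus.State.DONE,
                enums.JobStatus.State.ERROR,
                enums.JobStatus.State.CANCELLED)

    while True:
        job = client.get_job('my-project', 'global', 'my-job-id')
        if job.status.state in TERMINAL:
            break
        time.sleep(10)
    print(enums.JobStatus.State(job.status.state).name)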
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_job" not in self._inner_api_calls: - self._inner_api_calls[ - "get_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_job, - default_retry=self._method_configs["GetJob"].retry, - default_timeout=self._method_configs["GetJob"].timeout, - client_info=self._client_info, - ) - - request = jobs_pb2.GetJobRequest( - project_id=project_id, region=region, job_id=job_id - ) - return self._inner_api_calls["get_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_jobs( - self, - project_id, - region, - page_size=None, - cluster_name=None, - job_state_matcher=None, - filter_=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists regions/{region}/jobs in a project. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.JobControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # Iterate over all results - >>> for element in client.list_jobs(project_id, region): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_jobs(project_id, region).pages: - ... for element in page: - ... # process element - ... pass - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the job - belongs to. - region (str): Required. The Dataproc region in which to handle the request. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - cluster_name (str): Optional. If set, the returned jobs list includes only jobs that were - submitted to the named cluster. - job_state_matcher (~google.cloud.dataproc_v1.types.JobStateMatcher): Optional. Specifies enumerated categories of jobs to list. (default = - match ALL jobs). - - If ``filter`` is provided, ``jobStateMatcher`` will be ignored. - filter_ (str): Optional. A filter constraining the jobs to list. Filters are - case-sensitive and have the following syntax: - - [field = value] AND [field [= value]] ... - - where **field** is ``status.state`` or ``labels.[KEY]``, and ``[KEY]`` - is a label key. **value** can be ``*`` to match all values. - ``status.state`` can be either ``ACTIVE`` or ``NON_ACTIVE``. Only the - logical ``AND`` operator is supported; space-separated items are treated - as having an implicit ``AND`` operator. - - Example filter: - - status.state = ACTIVE AND labels.env = staging AND labels.starred = \* - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. 
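The filter syntax documented above combines state and label predicates with an implicit ``AND``; a sketch of a filtered, page-by-page listing, where the project, region, and label values are placeholders:

    from google.cloud import dataproc_v1

    client = dataproc_v1.JobControllerClient()

    page_iter = client.list_jobs(
        'my-project', 'global',
        filter_='status.state = ACTIVE AND labels.env = staging',
    )
    # Either iterate elements directly or walk one page at a time.
    for page in page_iter.pages:
        for job in page:
            print(job.reference.job_id)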
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.dataproc_v1.types.Job` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_jobs" not in self._inner_api_calls: - self._inner_api_calls[ - "list_jobs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_jobs, - default_retry=self._method_configs["ListJobs"].retry, - default_timeout=self._method_configs["ListJobs"].timeout, - client_info=self._client_info, - ) - - request = jobs_pb2.ListJobsRequest( - project_id=project_id, - region=region, - page_size=page_size, - cluster_name=cluster_name, - job_state_matcher=job_state_matcher, - filter=filter_, - ) - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_jobs"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="jobs", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def update_job( - self, - project_id, - region, - job_id, - job, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a job in a project. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.JobControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `job_id`: - >>> job_id = '' - >>> - >>> # TODO: Initialize `job`: - >>> job = {} - >>> - >>> # TODO: Initialize `update_mask`: - >>> update_mask = {} - >>> - >>> response = client.update_job(project_id, region, job_id, job, update_mask) - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the job - belongs to. - region (str): Required. The Dataproc region in which to handle the request. - job_id (str): Required. The job ID. - job (Union[dict, ~google.cloud.dataproc_v1.types.Job]): Required. The changes to the job. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1.types.Job` - update_mask (Union[dict, ~google.cloud.dataproc_v1.types.FieldMask]): Required. Specifies the path, relative to Job, of the field to update. - For example, to update the labels of a Job the update\_mask parameter - would be specified as labels, and the ``PATCH`` request body would - specify the new value. Note: Currently, labels is the only field that - can be updated. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
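Since labels are the only mutable field noted above, an ``update_job`` call reduces to sending the new labels plus a matching field mask; a sketch with placeholder identifiers, giving the mask in its protobuf ``paths`` form:

    from google.cloud import dataproc_v1

    client = dataproc_v1.JobControllerClient()

    updated = client.update_job(
        'my-project', 'global', 'my-job-id',
        job={'labels': {'team': 'analytics'}},
        update_mask={'paths': ['labels']},
    )
    print(dict(updated.labels))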
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1.types.Job` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_job" not in self._inner_api_calls: - self._inner_api_calls[ - "update_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_job, - default_retry=self._method_configs["UpdateJob"].retry, - default_timeout=self._method_configs["UpdateJob"].timeout, - client_info=self._client_info, - ) - - request = jobs_pb2.UpdateJobRequest( - project_id=project_id, - region=region, - job_id=job_id, - job=job, - update_mask=update_mask, - ) - return self._inner_api_calls["update_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def cancel_job( - self, - project_id, - region, - job_id, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Starts a job cancellation request. To access the job resource after - cancellation, call - `regions/{region}/jobs.list `__ - or - `regions/{region}/jobs.get `__. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.JobControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `job_id`: - >>> job_id = '' - >>> - >>> response = client.cancel_job(project_id, region, job_id) - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the job - belongs to. - region (str): Required. The Dataproc region in which to handle the request. - job_id (str): Required. The job ID. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1.types.Job` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "cancel_job" not in self._inner_api_calls: - self._inner_api_calls[ - "cancel_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.cancel_job, - default_retry=self._method_configs["CancelJob"].retry, - default_timeout=self._method_configs["CancelJob"].timeout, - client_info=self._client_info, - ) - - request = jobs_pb2.CancelJobRequest( - project_id=project_id, region=region, job_id=job_id - ) - return self._inner_api_calls["cancel_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_job( - self, - project_id, - region, - job_id, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes the job from the project. If the job is active, the delete - fails, and the response returns ``FAILED_PRECONDITION``. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.JobControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `job_id`: - >>> job_id = '' - >>> - >>> client.delete_job(project_id, region, job_id) - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the job - belongs to. - region (str): Required. The Dataproc region in which to handle the request. - job_id (str): Required. The job ID. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_job" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_job, - default_retry=self._method_configs["DeleteJob"].retry, - default_timeout=self._method_configs["DeleteJob"].timeout, - client_info=self._client_info, - ) - - request = jobs_pb2.DeleteJobRequest( - project_id=project_id, region=region, job_id=job_id - ) - self._inner_api_calls["delete_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/dataproc/google/cloud/dataproc_v1/gapic/job_controller_client_config.py b/dataproc/google/cloud/dataproc_v1/gapic/job_controller_client_config.py deleted file mode 100644 index 7cc7eaba38df..000000000000 --- a/dataproc/google/cloud/dataproc_v1/gapic/job_controller_client_config.py +++ /dev/null @@ -1,53 +0,0 @@ -config = { - "interfaces": { - "google.cloud.dataproc.v1.JobController": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": ["UNAVAILABLE"], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 30000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 30000, - "total_timeout_millis": 900000, - } - }, - "methods": { - "SubmitJob": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetJob": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListJobs": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "UpdateJob": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "CancelJob": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteJob": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/dataproc/google/cloud/dataproc_v1/gapic/transports/__init__.py b/dataproc/google/cloud/dataproc_v1/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/dataproc/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py b/dataproc/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py deleted file mode 100644 index 8ffede67d395..000000000000 --- a/dataproc/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py +++ /dev/null @@ -1,204 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import google.api_core.grpc_helpers -import google.api_core.operations_v1 - -from google.cloud.dataproc_v1.proto import clusters_pb2_grpc - - -class ClusterControllerGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.dataproc.v1 ClusterController API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, channel=None, credentials=None, address="dataproc.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "cluster_controller_stub": clusters_pb2_grpc.ClusterControllerStub(channel) - } - - # Because this API includes a method that returns a - # long-running operation (proto: google.longrunning.Operation), - # instantiate an LRO client. - self._operations_client = google.api_core.operations_v1.OperationsClient( - channel - ) - - @classmethod - def create_channel( - cls, address="dataproc.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_cluster(self): - """Return the gRPC stub for :meth:`ClusterControllerClient.create_cluster`. - - Creates a cluster in a project. The returned ``Operation.metadata`` will - be - `ClusterOperationMetadata `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
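The transport above can also be constructed directly, for example to target a non-default endpoint, and then handed to the client in place of credentials; the regional hostname below is illustrative, and the same effect is available through ``client_options={'api_endpoint': ...}`` on the client constructor:

    from google.cloud import dataproc_v1
    from google.cloud.dataproc_v1.gapic.transports import (
        cluster_controller_grpc_transport,
    )

    transport = cluster_controller_grpc_transport.ClusterControllerGrpcTransport(
        address='us-central1-dataproc.googleapis.com:443'
    )
    client = dataproc_v1.ClusterControllerClient(transport=transport)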
- """ - return self._stubs["cluster_controller_stub"].CreateCluster - - @property - def update_cluster(self): - """Return the gRPC stub for :meth:`ClusterControllerClient.update_cluster`. - - Updates a cluster in a project. The returned ``Operation.metadata`` will - be - `ClusterOperationMetadata `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cluster_controller_stub"].UpdateCluster - - @property - def delete_cluster(self): - """Return the gRPC stub for :meth:`ClusterControllerClient.delete_cluster`. - - Deletes a cluster in a project. The returned ``Operation.metadata`` will - be - `ClusterOperationMetadata `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cluster_controller_stub"].DeleteCluster - - @property - def get_cluster(self): - """Return the gRPC stub for :meth:`ClusterControllerClient.get_cluster`. - - Gets the resource representation for a cluster in a project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cluster_controller_stub"].GetCluster - - @property - def list_clusters(self): - """Return the gRPC stub for :meth:`ClusterControllerClient.list_clusters`. - - Lists all regions/{region}/clusters in a project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cluster_controller_stub"].ListClusters - - @property - def diagnose_cluster(self): - """Return the gRPC stub for :meth:`ClusterControllerClient.diagnose_cluster`. - - Gets cluster diagnostic information. The returned ``Operation.metadata`` - will be - `ClusterOperationMetadata `__. - After the operation completes, ``Operation.response`` contains - `DiagnoseClusterResults `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cluster_controller_stub"].DiagnoseCluster diff --git a/dataproc/google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py b/dataproc/google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py deleted file mode 100644 index d71efe4307c5..000000000000 --- a/dataproc/google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py +++ /dev/null @@ -1,189 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.dataproc_v1.proto import jobs_pb2_grpc - - -class JobControllerGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.dataproc.v1 JobController API. 
- - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, channel=None, credentials=None, address="dataproc.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = {"job_controller_stub": jobs_pb2_grpc.JobControllerStub(channel)} - - @classmethod - def create_channel( - cls, address="dataproc.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def submit_job(self): - """Return the gRPC stub for :meth:`JobControllerClient.submit_job`. - - Submits a job to a cluster. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["job_controller_stub"].SubmitJob - - @property - def get_job(self): - """Return the gRPC stub for :meth:`JobControllerClient.get_job`. - - Gets the resource representation for a job in a project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["job_controller_stub"].GetJob - - @property - def list_jobs(self): - """Return the gRPC stub for :meth:`JobControllerClient.list_jobs`. - - Lists regions/{region}/jobs in a project. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["job_controller_stub"].ListJobs - - @property - def update_job(self): - """Return the gRPC stub for :meth:`JobControllerClient.update_job`. - - Updates a job in a project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["job_controller_stub"].UpdateJob - - @property - def cancel_job(self): - """Return the gRPC stub for :meth:`JobControllerClient.cancel_job`. - - Starts a job cancellation request. To access the job resource after - cancellation, call - `regions/{region}/jobs.list `__ - or - `regions/{region}/jobs.get `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["job_controller_stub"].CancelJob - - @property - def delete_job(self): - """Return the gRPC stub for :meth:`JobControllerClient.delete_job`. - - Deletes the job from the project. If the job is active, the delete - fails, and the response returns ``FAILED_PRECONDITION``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["job_controller_stub"].DeleteJob diff --git a/dataproc/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py b/dataproc/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py deleted file mode 100644 index 06564b0797f5..000000000000 --- a/dataproc/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py +++ /dev/null @@ -1,249 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers -import google.api_core.operations_v1 - -from google.cloud.dataproc_v1.proto import workflow_templates_pb2_grpc - - -class WorkflowTemplateServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.dataproc.v1 WorkflowTemplateService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, channel=None, credentials=None, address="dataproc.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. 
These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "workflow_template_service_stub": workflow_templates_pb2_grpc.WorkflowTemplateServiceStub( - channel - ) - } - - # Because this API includes a method that returns a - # long-running operation (proto: google.longrunning.Operation), - # instantiate an LRO client. - self._operations_client = google.api_core.operations_v1.OperationsClient( - channel - ) - - @classmethod - def create_channel( - cls, address="dataproc.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_workflow_template(self): - """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.create_workflow_template`. - - Creates new workflow template. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["workflow_template_service_stub"].CreateWorkflowTemplate - - @property - def get_workflow_template(self): - """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.get_workflow_template`. - - Retrieves the latest workflow template. - - Can retrieve previously instantiated template by specifying optional - version parameter. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["workflow_template_service_stub"].GetWorkflowTemplate - - @property - def instantiate_workflow_template(self): - """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.instantiate_workflow_template`. - - Instantiates a template and begins execution. - - The returned Operation can be used to track execution of workflow by - polling ``operations.get``. The Operation will complete when entire - workflow is finished. - - The running workflow can be aborted via ``operations.cancel``. 
This will - cause any inflight jobs to be cancelled and workflow-owned clusters to - be deleted. - - The ``Operation.metadata`` will be - `WorkflowMetadata `__. - Also see `Using - WorkflowMetadata `__. - - On successful completion, ``Operation.response`` will be ``Empty``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["workflow_template_service_stub"].InstantiateWorkflowTemplate - - @property - def instantiate_inline_workflow_template(self): - """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.instantiate_inline_workflow_template`. - - Instantiates a template and begins execution. - - This method is equivalent to executing the sequence - ``CreateWorkflowTemplate``, ``InstantiateWorkflowTemplate``, - ``DeleteWorkflowTemplate``. - - The returned Operation can be used to track execution of workflow by - polling ``operations.get``. The Operation will complete when entire - workflow is finished. - - The running workflow can be aborted via ``operations.cancel``. This will - cause any inflight jobs to be cancelled and workflow-owned clusters to - be deleted. - - The ``Operation.metadata`` will be - `WorkflowMetadata `__. - Also see `Using - WorkflowMetadata `__. - - On successful completion, ``Operation.response`` will be ``Empty``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs[ - "workflow_template_service_stub" - ].InstantiateInlineWorkflowTemplate - - @property - def update_workflow_template(self): - """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.update_workflow_template`. - - Updates (replaces) workflow template. The updated template - must contain version that matches the current server version. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["workflow_template_service_stub"].UpdateWorkflowTemplate - - @property - def list_workflow_templates(self): - """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.list_workflow_templates`. - - Lists workflows that match the specified filter in the request. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["workflow_template_service_stub"].ListWorkflowTemplates - - @property - def delete_workflow_template(self): - """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.delete_workflow_template`. - - Deletes a workflow template. It does not cancel in-progress workflows. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["workflow_template_service_stub"].DeleteWorkflowTemplate diff --git a/dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py b/dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py deleted file mode 100644 index 27e3eadcef7b..000000000000 --- a/dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py +++ /dev/null @@ -1,933 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.cloud.dataproc.v1 WorkflowTemplateService API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.operation -import google.api_core.operations_v1 -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.cloud.dataproc_v1.gapic import enums -from google.cloud.dataproc_v1.gapic import workflow_template_service_client_config -from google.cloud.dataproc_v1.gapic.transports import ( - workflow_template_service_grpc_transport, -) -from google.cloud.dataproc_v1.proto import clusters_pb2 -from google.cloud.dataproc_v1.proto import clusters_pb2_grpc -from google.cloud.dataproc_v1.proto import jobs_pb2 -from google.cloud.dataproc_v1.proto import jobs_pb2_grpc -from google.cloud.dataproc_v1.proto import operations_pb2 as proto_operations_pb2 -from google.cloud.dataproc_v1.proto import workflow_templates_pb2 -from google.cloud.dataproc_v1.proto import workflow_templates_pb2_grpc -from google.longrunning import operations_pb2 as longrunning_operations_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version - - -class WorkflowTemplateServiceClient(object): - """ - The API interface for managing Workflow Templates in the - Dataproc API. - """ - - SERVICE_ADDRESS = "dataproc.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.dataproc.v1.WorkflowTemplateService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - WorkflowTemplateServiceClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def region_path(cls, project, region): - """Return a fully-qualified region string.""" - return google.api_core.path_template.expand( - "projects/{project}/regions/{region}", project=project, region=region - ) - - @classmethod - def workflow_template_path(cls, project, region, workflow_template): - """Return a fully-qualified workflow_template string.""" - return google.api_core.path_template.expand( - "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}", - project=project, - region=region, - workflow_template=workflow_template, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.WorkflowTemplateServiceGrpcTransport, - Callable[[~.Credentials, type], ~.WorkflowTemplateServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = workflow_template_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. 
- if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=workflow_template_service_grpc_transport.WorkflowTemplateServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = workflow_template_service_grpc_transport.WorkflowTemplateServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_workflow_template( - self, - parent, - template, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates new workflow template. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.WorkflowTemplateServiceClient() - >>> - >>> parent = client.region_path('[PROJECT]', '[REGION]') - >>> - >>> # TODO: Initialize `template`: - >>> template = {} - >>> - >>> response = client.create_workflow_template(parent, template) - - Args: - parent (str): Required. The resource name of the region or location, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.workflowTemplates,create``, the resource name - of the region has the following format: - ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.workflowTemplates.create``, the resource - name of the location has the following format: - ``projects/{project_id}/locations/{location}`` - template (Union[dict, ~google.cloud.dataproc_v1.types.WorkflowTemplate]): Required. The Dataproc workflow template to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1.types.WorkflowTemplate` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1.types.WorkflowTemplate` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. 
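Because ``template`` may be passed as a plain dict mirroring the ``WorkflowTemplate`` message, a rough sketch of that shape may be useful. Field names here are assumptions based on the v1 WorkflowTemplate message and should be checked against ``workflow_templates.proto``; bracketed values are placeholders:

# Illustrative dict form of a WorkflowTemplate (not exhaustive).
template = {
    "id": "sample-template",
    "placement": {
        "managed_cluster": {
            "cluster_name": "workflow-cluster",
            "config": {"gce_cluster_config": {"zone_uri": "us-central1-a"}},
        }
    },
    "jobs": [
        {
            "step_id": "step-1",
            "pyspark_job": {"main_python_file_uri": "gs://[BUCKET]/job.py"},
        }
    ],
}
response = client.create_workflow_template(parent, template)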
- """ - # Wrap the transport method to add retry and timeout logic. - if "create_workflow_template" not in self._inner_api_calls: - self._inner_api_calls[ - "create_workflow_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_workflow_template, - default_retry=self._method_configs["CreateWorkflowTemplate"].retry, - default_timeout=self._method_configs["CreateWorkflowTemplate"].timeout, - client_info=self._client_info, - ) - - request = workflow_templates_pb2.CreateWorkflowTemplateRequest( - parent=parent, template=template - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_workflow_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_workflow_template( - self, - name, - version=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Retrieves the latest workflow template. - - Can retrieve previously instantiated template by specifying optional - version parameter. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.WorkflowTemplateServiceClient() - >>> - >>> name = client.workflow_template_path('[PROJECT]', '[REGION]', '[WORKFLOW_TEMPLATE]') - >>> - >>> response = client.get_workflow_template(name) - - Args: - name (str): Required. The resource name of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.workflowTemplates.get``, the resource name of - the template has the following format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For ``projects.locations.workflowTemplates.get``, the resource name - of the template has the following format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - version (int): Optional. The version of workflow template to retrieve. Only previously - instantiated versions can be retrieved. - - If unspecified, retrieves the current version. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1.types.WorkflowTemplate` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_workflow_template" not in self._inner_api_calls: - self._inner_api_calls[ - "get_workflow_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_workflow_template, - default_retry=self._method_configs["GetWorkflowTemplate"].retry, - default_timeout=self._method_configs["GetWorkflowTemplate"].timeout, - client_info=self._client_info, - ) - - request = workflow_templates_pb2.GetWorkflowTemplateRequest( - name=name, version=version - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_workflow_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def instantiate_workflow_template( - self, - name, - version=None, - request_id=None, - parameters=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Instantiates a template and begins execution. - - The returned Operation can be used to track execution of workflow by - polling ``operations.get``. The Operation will complete when entire - workflow is finished. - - The running workflow can be aborted via ``operations.cancel``. This will - cause any inflight jobs to be cancelled and workflow-owned clusters to - be deleted. - - The ``Operation.metadata`` will be - `WorkflowMetadata `__. - Also see `Using - WorkflowMetadata `__. - - On successful completion, ``Operation.response`` will be ``Empty``. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.WorkflowTemplateServiceClient() - >>> - >>> name = client.workflow_template_path('[PROJECT]', '[REGION]', '[WORKFLOW_TEMPLATE]') - >>> - >>> response = client.instantiate_workflow_template(name) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - name (str): Required. The resource name of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.workflowTemplates.instantiate``, the resource - name of the template has the following format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For ``projects.locations.workflowTemplates.instantiate``, the - resource name of the template has the following format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - version (int): Optional. The version of workflow template to instantiate. If specified, - the workflow will be instantiated only if the current version of - the workflow template has the supplied version. - - This option cannot be used to instantiate a previous version of - workflow template. - request_id (str): Optional. A tag that prevents multiple concurrent workflow instances - with the same tag from running. This mitigates risk of concurrent - instances started due to retries. - - It is recommended to always set this value to a - `UUID `__. - - The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores - (\_), and hyphens (-). The maximum length is 40 characters. - parameters (dict[str -> str]): Optional. 
Map from parameter names to values that should be used for those - parameters. Values may not exceed 100 characters. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "instantiate_workflow_template" not in self._inner_api_calls: - self._inner_api_calls[ - "instantiate_workflow_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.instantiate_workflow_template, - default_retry=self._method_configs["InstantiateWorkflowTemplate"].retry, - default_timeout=self._method_configs[ - "InstantiateWorkflowTemplate" - ].timeout, - client_info=self._client_info, - ) - - request = workflow_templates_pb2.InstantiateWorkflowTemplateRequest( - name=name, version=version, request_id=request_id, parameters=parameters - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["instantiate_workflow_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=workflow_templates_pb2.WorkflowMetadata, - ) - - def instantiate_inline_workflow_template( - self, - parent, - template, - request_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Instantiates a template and begins execution. - - This method is equivalent to executing the sequence - ``CreateWorkflowTemplate``, ``InstantiateWorkflowTemplate``, - ``DeleteWorkflowTemplate``. - - The returned Operation can be used to track execution of workflow by - polling ``operations.get``. The Operation will complete when entire - workflow is finished. - - The running workflow can be aborted via ``operations.cancel``. This will - cause any inflight jobs to be cancelled and workflow-owned clusters to - be deleted. - - The ``Operation.metadata`` will be - `WorkflowMetadata `__. - Also see `Using - WorkflowMetadata `__. - - On successful completion, ``Operation.response`` will be ``Empty``. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.WorkflowTemplateServiceClient() - >>> - >>> parent = client.region_path('[PROJECT]', '[REGION]') - >>> - >>> # TODO: Initialize `template`: - >>> template = {} - >>> - >>> response = client.instantiate_inline_workflow_template(parent, template) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... 
result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - parent (str): Required. The resource name of the region or location, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.workflowTemplates,instantiateinline``, the - resource name of the region has the following format: - ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.workflowTemplates.instantiateinline``, the - resource name of the location has the following format: - ``projects/{project_id}/locations/{location}`` - template (Union[dict, ~google.cloud.dataproc_v1.types.WorkflowTemplate]): Required. The workflow template to instantiate. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1.types.WorkflowTemplate` - request_id (str): Optional. A tag that prevents multiple concurrent workflow instances - with the same tag from running. This mitigates risk of concurrent - instances started due to retries. - - It is recommended to always set this value to a - `UUID `__. - - The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores - (\_), and hyphens (-). The maximum length is 40 characters. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
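Because this call returns a long-running operation future (assembled with ``google.api_core.operation.from_gapic`` below), both blocking and callback styles work. A brief sketch, assuming a client, ``parent``, and ``template`` are already in hand:

# Sketch: two ways to consume the operation future returned above.
operation = client.instantiate_inline_workflow_template(parent, template)

# Blocking: wait for the workflow to finish (the response type is Empty).
operation.result(timeout=1800)

# Non-blocking: register a callback and inspect metadata for progress.
def on_done(future):
    print("workflow finished, exception:", future.exception())

operation.add_done_callback(on_done)
print(operation.metadata)  # WorkflowMetadata with graph/cluster details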
- if "instantiate_inline_workflow_template" not in self._inner_api_calls: - self._inner_api_calls[ - "instantiate_inline_workflow_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.instantiate_inline_workflow_template, - default_retry=self._method_configs[ - "InstantiateInlineWorkflowTemplate" - ].retry, - default_timeout=self._method_configs[ - "InstantiateInlineWorkflowTemplate" - ].timeout, - client_info=self._client_info, - ) - - request = workflow_templates_pb2.InstantiateInlineWorkflowTemplateRequest( - parent=parent, template=template, request_id=request_id - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["instantiate_inline_workflow_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=workflow_templates_pb2.WorkflowMetadata, - ) - - def update_workflow_template( - self, - template, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates (replaces) workflow template. The updated template - must contain version that matches the current server version. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.WorkflowTemplateServiceClient() - >>> - >>> # TODO: Initialize `template`: - >>> template = {} - >>> - >>> response = client.update_workflow_template(template) - - Args: - template (Union[dict, ~google.cloud.dataproc_v1.types.WorkflowTemplate]): Required. The updated workflow template. - - The ``template.version`` field must match the current version. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1.types.WorkflowTemplate` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1.types.WorkflowTemplate` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_workflow_template" not in self._inner_api_calls: - self._inner_api_calls[ - "update_workflow_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_workflow_template, - default_retry=self._method_configs["UpdateWorkflowTemplate"].retry, - default_timeout=self._method_configs["UpdateWorkflowTemplate"].timeout, - client_info=self._client_info, - ) - - request = workflow_templates_pb2.UpdateWorkflowTemplateRequest( - template=template - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("template.name", template.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_workflow_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_workflow_templates( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists workflows that match the specified filter in the request. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.WorkflowTemplateServiceClient() - >>> - >>> parent = client.region_path('[PROJECT]', '[REGION]') - >>> - >>> # Iterate over all results - >>> for element in client.list_workflow_templates(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_workflow_templates(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The resource name of the region or location, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.workflowTemplates,list``, the resource name of - the region has the following format: - ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.workflowTemplates.list``, the resource name - of the location has the following format: - ``projects/{project_id}/locations/{location}`` - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.dataproc_v1.types.WorkflowTemplate` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_workflow_templates" not in self._inner_api_calls: - self._inner_api_calls[ - "list_workflow_templates" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_workflow_templates, - default_retry=self._method_configs["ListWorkflowTemplates"].retry, - default_timeout=self._method_configs["ListWorkflowTemplates"].timeout, - client_info=self._client_info, - ) - - request = workflow_templates_pb2.ListWorkflowTemplatesRequest( - parent=parent, page_size=page_size - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_workflow_templates"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="templates", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_workflow_template( - self, - name, - version=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a workflow template. It does not cancel in-progress workflows. - - Example: - >>> from google.cloud import dataproc_v1 - >>> - >>> client = dataproc_v1.WorkflowTemplateServiceClient() - >>> - >>> name = client.workflow_template_path('[PROJECT]', '[REGION]', '[WORKFLOW_TEMPLATE]') - >>> - >>> client.delete_workflow_template(name) - - Args: - name (str): Required. The resource name of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.workflowTemplates.delete``, the resource name - of the template has the following format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For ``projects.locations.workflowTemplates.instantiate``, the - resource name of the template has the following format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - version (int): Optional. The version of workflow template to delete. If specified, - will only delete the template if the current server version matches - specified version. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_workflow_template" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_workflow_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_workflow_template, - default_retry=self._method_configs["DeleteWorkflowTemplate"].retry, - default_timeout=self._method_configs["DeleteWorkflowTemplate"].timeout, - client_info=self._client_info, - ) - - request = workflow_templates_pb2.DeleteWorkflowTemplateRequest( - name=name, version=version - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_workflow_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client_config.py b/dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client_config.py deleted file mode 100644 index 65b6260b5801..000000000000 --- a/dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client_config.py +++ /dev/null @@ -1,58 +0,0 @@ -config = { - "interfaces": { - "google.cloud.dataproc.v1.WorkflowTemplateService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": ["UNAVAILABLE"], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "CreateWorkflowTemplate": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetWorkflowTemplate": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "InstantiateWorkflowTemplate": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "InstantiateInlineWorkflowTemplate": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateWorkflowTemplate": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "ListWorkflowTemplates": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteWorkflowTemplate": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/dataproc/google/cloud/dataproc_v1/proto/__init__.py b/dataproc/google/cloud/dataproc_v1/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies.proto b/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies.proto deleted file mode 100644 index 65035a596f8d..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies.proto +++ /dev/null @@ -1,340 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.dataproc.v1; - -import "google/api/annotations.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; -import "google/api/client.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "AutoscalingPoliciesProto"; -option java_package = "com.google.cloud.dataproc.v1"; - -// The API interface for managing autoscaling policies in the -// Dataproc API. -service AutoscalingPolicyService { - option (google.api.default_host) = "dataproc.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates new autoscaling policy. - rpc CreateAutoscalingPolicy(CreateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { - option (google.api.http) = { - post: "/v1/{parent=projects/*/locations/*}/autoscalingPolicies" - body: "policy" - additional_bindings { - post: "/v1/{parent=projects/*/regions/*}/autoscalingPolicies" - body: "policy" - } - }; - } - - // Updates (replaces) autoscaling policy. - // - // Disabled check for update_mask, because all updates will be full - // replacements. - rpc UpdateAutoscalingPolicy(UpdateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { - option (google.api.http) = { - put: "/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}" - body: "policy" - additional_bindings { - put: "/v1/{policy.name=projects/*/regions/*/autoscalingPolicies/*}" - body: "policy" - } - }; - } - - // Retrieves autoscaling policy. - rpc GetAutoscalingPolicy(GetAutoscalingPolicyRequest) returns (AutoscalingPolicy) { - option (google.api.http) = { - get: "/v1/{name=projects/*/locations/*/autoscalingPolicies/*}" - additional_bindings { - get: "/v1/{name=projects/*/regions/*/autoscalingPolicies/*}" - } - }; - } - - // Lists autoscaling policies in the project. - rpc ListAutoscalingPolicies(ListAutoscalingPoliciesRequest) returns (ListAutoscalingPoliciesResponse) { - option (google.api.http) = { - get: "/v1/{parent=projects/*/locations/*}/autoscalingPolicies" - additional_bindings { - get: "/v1/{parent=projects/*/regions/*}/autoscalingPolicies" - } - }; - } - - // Deletes an autoscaling policy. It is an error to delete an autoscaling - // policy that is in use by one or more clusters. - rpc DeleteAutoscalingPolicy(DeleteAutoscalingPolicyRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/{name=projects/*/locations/*/autoscalingPolicies/*}" - additional_bindings { - delete: "/v1/{name=projects/*/regions/*/autoscalingPolicies/*}" - } - }; - } -} - -// Describes an autoscaling policy for Dataproc cluster autoscaler. -message AutoscalingPolicy { - option (google.api.resource) = { - type: "dataproc.googleapis.com/AutoscalingPolicy" - pattern: "projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}" - }; - - // Required. The policy id. 
- // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). Cannot begin or end with underscore - // or hyphen. Must consist of between 3 and 50 characters. - // - string id = 1; - - // Output only. The "resource name" of the autoscaling policy, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.autoscalingPolicies`, the resource name of the - // policy has the following format: - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - // - // * For `projects.locations.autoscalingPolicies`, the resource name of the - // policy has the following format: - // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` - string name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Autoscaling algorithm for policy. - oneof algorithm { - BasicAutoscalingAlgorithm basic_algorithm = 3 [(google.api.field_behavior) = REQUIRED]; - } - - // Required. Describes how the autoscaler will operate for primary workers. - InstanceGroupAutoscalingPolicyConfig worker_config = 4 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Describes how the autoscaler will operate for secondary workers. - InstanceGroupAutoscalingPolicyConfig secondary_worker_config = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Basic algorithm for autoscaling. -message BasicAutoscalingAlgorithm { - // Required. YARN autoscaling configuration. - BasicYarnAutoscalingConfig yarn_config = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Duration between scaling events. A scaling period starts after - // the update operation from the previous event has completed. - // - // Bounds: [2m, 1d]. Default: 2m. - google.protobuf.Duration cooldown_period = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// Basic autoscaling configurations for YARN. -message BasicYarnAutoscalingConfig { - // Required. Timeout for YARN graceful decommissioning of Node Managers. - // Specifies the duration to wait for jobs to complete before forcefully - // removing workers (and potentially interrupting jobs). Only applicable to - // downscaling operations. - // - // Bounds: [0s, 1d]. - google.protobuf.Duration graceful_decommission_timeout = 5 [(google.api.field_behavior) = REQUIRED]; - - // Required. Fraction of average pending memory in the last cooldown period - // for which to add workers. A scale-up factor of 1.0 will result in scaling - // up so that there is no pending memory remaining after the update (more - // aggressive scaling). A scale-up factor closer to 0 will result in a smaller - // magnitude of scaling up (less aggressive scaling). - // - // Bounds: [0.0, 1.0]. - double scale_up_factor = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. Fraction of average pending memory in the last cooldown period - // for which to remove workers. A scale-down factor of 1 will result in - // scaling down so that there is no available memory remaining after the - // update (more aggressive scaling). A scale-down factor of 0 disables - // removing workers, which can be beneficial for autoscaling a single job. - // - // Bounds: [0.0, 1.0]. - double scale_down_factor = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Minimum scale-up threshold as a fraction of total cluster size - // before scaling occurs. For example, in a 20-worker cluster, a threshold of - // 0.1 means the autoscaler must recommend at least a 2-worker scale-up for - // the cluster to scale. 
A threshold of 0 means the autoscaler will scale up - // on any recommended change. - // - // Bounds: [0.0, 1.0]. Default: 0.0. - double scale_up_min_worker_fraction = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Minimum scale-down threshold as a fraction of total cluster size - // before scaling occurs. For example, in a 20-worker cluster, a threshold of - // 0.1 means the autoscaler must recommend at least a 2 worker scale-down for - // the cluster to scale. A threshold of 0 means the autoscaler will scale down - // on any recommended change. - // - // Bounds: [0.0, 1.0]. Default: 0.0. - double scale_down_min_worker_fraction = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// Configuration for the size bounds of an instance group, including its -// proportional size to other groups. -message InstanceGroupAutoscalingPolicyConfig { - // Optional. Minimum number of instances for this group. - // - // Primary workers - Bounds: [2, max_instances]. Default: 2. - // Secondary workers - Bounds: [0, max_instances]. Default: 0. - int32 min_instances = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Maximum number of instances for this group. Required for primary - // workers. Note that by default, clusters will not use secondary workers. - // Required for secondary workers if the minimum secondary instances is set. - // - // Primary workers - Bounds: [min_instances, ). - // Secondary workers - Bounds: [min_instances, ). Default: 0. - int32 max_instances = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Weight for the instance group, which is used to determine the - // fraction of total workers in the cluster from this instance group. - // For example, if primary workers have weight 2, and secondary workers have - // weight 1, the cluster will have approximately 2 primary workers for each - // secondary worker. - // - // The cluster may not reach the specified balance if constrained - // by min/max bounds or other autoscaling settings. For example, if - // `max_instances` for secondary workers is 0, then only primary workers will - // be added. The cluster can also be out of balance when created. - // - // If weight is not set on any instance group, the cluster will default to - // equal weight for all groups: the cluster will attempt to maintain an equal - // number of workers in each group within the configured size bounds for each - // group. If weight is set for one group only, the cluster will default to - // zero weight on the unset group. For example if weight is set only on - // primary workers, the cluster will use primary workers only and no - // secondary workers. - int32 weight = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to create an autoscaling policy. -message CreateAutoscalingPolicyRequest { - // Required. The "resource name" of the region or location, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.autoscalingPolicies.create`, the resource name - // of the region has the following format: - // `projects/{project_id}/regions/{region}` - // - // * For `projects.locations.autoscalingPolicies.create`, the resource name - // of the location has the following format: - // `projects/{project_id}/locations/{location}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/AutoscalingPolicy" - } - ]; - - // The autoscaling policy to create. 
- AutoscalingPolicy policy = 2; -} - -// A request to fetch an autoscaling policy. -message GetAutoscalingPolicyRequest { - // Required. The "resource name" of the autoscaling policy, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.autoscalingPolicies.get`, the resource name - // of the policy has the following format: - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - // - // * For `projects.locations.autoscalingPolicies.get`, the resource name - // of the policy has the following format: - // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/AutoscalingPolicy" - } - ]; -} - -// A request to update an autoscaling policy. -message UpdateAutoscalingPolicyRequest { - // Required. The updated autoscaling policy. - AutoscalingPolicy policy = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to delete an autoscaling policy. -// -// Autoscaling policies in use by one or more clusters will not be deleted. -message DeleteAutoscalingPolicyRequest { - // Required. The "resource name" of the autoscaling policy, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.autoscalingPolicies.delete`, the resource name - // of the policy has the following format: - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - // - // * For `projects.locations.autoscalingPolicies.delete`, the resource name - // of the policy has the following format: - // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/AutoscalingPolicy" - } - ]; -} - -// A request to list autoscaling policies in a project. -message ListAutoscalingPoliciesRequest { - // Required. The "resource name" of the region or location, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.autoscalingPolicies.list`, the resource name - // of the region has the following format: - // `projects/{project_id}/regions/{region}` - // - // * For `projects.locations.autoscalingPolicies.list`, the resource name - // of the location has the following format: - // `projects/{project_id}/locations/{location}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/AutoscalingPolicy" - } - ]; - - // Optional. The maximum number of results to return in each response. - // Must be less than or equal to 1000. Defaults to 100. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The page token, returned by a previous call, to request the - // next page of results. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// A response to a request to list autoscaling policies in a project. -message ListAutoscalingPoliciesResponse { - // Output only. Autoscaling policies list. - repeated AutoscalingPolicy policies = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. This token is included in the response if there are more - // results to fetch. 
- string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} diff --git a/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py deleted file mode 100644 index 8d76aae29940..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py +++ /dev/null @@ -1,1209 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/dataproc_v1/proto/autoscaling_policies.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/dataproc_v1/proto/autoscaling_policies.proto", - package="google.cloud.dataproc.v1", - syntax="proto3", - serialized_options=_b( - "\n\034com.google.cloud.dataproc.v1B\030AutoscalingPoliciesProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" - ), - serialized_pb=_b( - '\n9google/cloud/dataproc_v1/proto/autoscaling_policies.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x17google/api/client.proto"\xd4\x03\n\x11\x41utoscalingPolicy\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\x04name\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12S\n\x0f\x62\x61sic_algorithm\x18\x03 \x01(\x0b\x32\x33.google.cloud.dataproc.v1.BasicAutoscalingAlgorithmB\x03\xe0\x41\x02H\x00\x12Z\n\rworker_config\x18\x04 \x01(\x0b\x32>.google.cloud.dataproc.v1.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x02\x12\x64\n\x17secondary_worker_config\x18\x05 \x01(\x0b\x32>.google.cloud.dataproc.v1.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x01:|\xea\x41y\n)dataproc.googleapis.com/AutoscalingPolicy\x12Lprojects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}B\x0b\n\talgorithm"\xa4\x01\n\x19\x42\x61sicAutoscalingAlgorithm\x12N\n\x0byarn_config\x18\x01 \x01(\x0b\x32\x34.google.cloud.dataproc.v1.BasicYarnAutoscalingConfigB\x03\xe0\x41\x02\x12\x37\n\x0f\x63ooldown_period\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\xf9\x01\n\x1a\x42\x61sicYarnAutoscalingConfig\x12\x45\n\x1dgraceful_decommission_timeout\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12\x1c\n\x0fscale_up_factor\x18\x01 \x01(\x01\x42\x03\xe0\x41\x02\x12\x1e\n\x11scale_down_factor\x18\x02 \x01(\x01\x42\x03\xe0\x41\x02\x12)\n\x1cscale_up_min_worker_fraction\x18\x03 \x01(\x01\x42\x03\xe0\x41\x01\x12+\n\x1escale_down_min_worker_fraction\x18\x04 \x01(\x01\x42\x03\xe0\x41\x01"s\n$InstanceGroupAutoscalingPolicyConfig\x12\x1a\n\rmin_instances\x18\x01 
\x01(\x05\x42\x03\xe0\x41\x01\x12\x1a\n\rmax_instances\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02\x12\x13\n\x06weight\x18\x03 \x01(\x05\x42\x03\xe0\x41\x01"\xa0\x01\n\x1e\x43reateAutoscalingPolicyRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12;\n\x06policy\x18\x02 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicy"^\n\x1bGetAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"b\n\x1eUpdateAutoscalingPolicyRequest\x12@\n\x06policy\x18\x01 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicyB\x03\xe0\x41\x02"a\n\x1e\x44\x65leteAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"\x94\x01\n\x1eListAutoscalingPoliciesRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x83\x01\n\x1fListAutoscalingPoliciesResponse\x12\x42\n\x08policies\x18\x01 \x03(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicyB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03\x32\xfd\n\n\x18\x41utoscalingPolicyService\x12\x8c\x02\n\x17\x43reateAutoscalingPolicy\x12\x38.google.cloud.dataproc.v1.CreateAutoscalingPolicyRequest\x1a+.google.cloud.dataproc.v1.AutoscalingPolicy"\x89\x01\x82\xd3\xe4\x93\x02\x82\x01"7/v1/{parent=projects/*/locations/*}/autoscalingPolicies:\x06policyZ?"5/v1/{parent=projects/*/regions/*}/autoscalingPolicies:\x06policy\x12\x9a\x02\n\x17UpdateAutoscalingPolicy\x12\x38.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest\x1a+.google.cloud.dataproc.v1.AutoscalingPolicy"\x97\x01\x82\xd3\xe4\x93\x02\x90\x01\x1a>/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}:\x06policyZF\x1a/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}:\006policyZF\032 labels = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. Cluster status. - ClusterStatus status = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The previous cluster status. - repeated ClusterStatus status_history = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. A cluster UUID (Unique Universal Identifier). Dataproc - // generates this value when it creates the cluster. - string cluster_uuid = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Contains cluster daemon metrics such as HDFS and YARN stats. - // - // **Beta Feature**: This report is available for testing purposes only. It - // may be changed before final release. - ClusterMetrics metrics = 9; -} - -// The cluster config. -message ClusterConfig { - // Optional. A Cloud Storage bucket used to stage job - // dependencies, config files, and job driver console output. - // If you do not specify a staging bucket, Cloud - // Dataproc will determine a Cloud Storage location (US, - // ASIA, or EU) for your cluster's staging bucket according to the - // Compute Engine zone where your cluster is deployed, and then create - // and manage this project-level, per-location bucket (see - // [Dataproc staging - // bucket](/dataproc/docs/concepts/configuring-clusters/staging-bucket)). - string config_bucket = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The shared Compute Engine config settings for - // all instances in a cluster. 
- GceClusterConfig gce_cluster_config = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine config settings for - // the master instance in a cluster. - InstanceGroupConfig master_config = 9 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine config settings for - // worker instances in a cluster. - InstanceGroupConfig worker_config = 10 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine config settings for - // additional worker instances in a cluster. - InstanceGroupConfig secondary_worker_config = 12 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The config settings for software inside the cluster. - SoftwareConfig software_config = 13 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Commands to execute on each node after config is - // completed. By default, executables are run on master and all worker nodes. - // You can test a node's `role` metadata to run an executable on - // a master or worker node, as shown below using `curl` (you can also use - // `wget`): - // - // ROLE=$(curl -H Metadata-Flavor:Google - // http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) - // if [[ "${ROLE}" == 'Master' ]]; then - // ... master specific actions ... - // else - // ... worker specific actions ... - // fi - repeated NodeInitializationAction initialization_actions = 11 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Encryption settings for the cluster. - EncryptionConfig encryption_config = 15 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Autoscaling config for the policy associated with the cluster. - // Cluster does not autoscale if this field is unset. - AutoscalingConfig autoscaling_config = 18 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Security settings for the cluster. - SecurityConfig security_config = 16 [(google.api.field_behavior) = OPTIONAL]; -} - -// Autoscaling Policy config associated with the cluster. -message AutoscalingConfig { - // Optional. The autoscaling policy used by the cluster. - // - // Only resource names including projectid and location (region) are valid. - // Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` - // * `projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` - // - // Note that the policy must be in the same project and Dataproc region. - string policy_uri = 1 [(google.api.field_behavior) = OPTIONAL]; -} - -// Encryption settings for the cluster. -message EncryptionConfig { - // Optional. The Cloud KMS key name to use for PD disk encryption for all - // instances in the cluster. - string gce_pd_kms_key_name = 1 [(google.api.field_behavior) = OPTIONAL]; -} - -// Common config settings for resources of Compute Engine cluster -// instances, applicable to all instances in the cluster. -message GceClusterConfig { - // Optional. The zone where the Compute Engine cluster will be located. - // On a create request, it is required in the "global" region. If omitted - // in a non-global Dataproc region, the service will pick a zone in the - // corresponding Compute Engine region. On a get request, zone will - // always be present. - // - // A full URL, partial URI, or short name are valid. 
Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]` - // * `projects/[project_id]/zones/[zone]` - // * `us-central1-f` - string zone_uri = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine network to be used for machine - // communications. Cannot be specified with subnetwork_uri. If neither - // `network_uri` nor `subnetwork_uri` is specified, the "default" network of - // the project is used, if it exists. Cannot be a "Custom Subnet Network" (see - // [Using Subnetworks](/compute/docs/subnetworks) for more information). - // - // A full URL, partial URI, or short name are valid. Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/global/default` - // * `projects/[project_id]/regions/global/default` - // * `default` - string network_uri = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine subnetwork to be used for machine - // communications. Cannot be specified with network_uri. - // - // A full URL, partial URI, or short name are valid. Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0` - // * `projects/[project_id]/regions/us-east1/subnetworks/sub0` - // * `sub0` - string subnetwork_uri = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If true, all instances in the cluster will only have internal IP - // addresses. By default, clusters are not restricted to internal IP - // addresses, and will have ephemeral external IP addresses assigned to each - // instance. This `internal_ip_only` restriction can only be enabled for - // subnetwork enabled networks, and all off-cluster dependencies must be - // configured to be accessible without external IP addresses. - bool internal_ip_only = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The [Dataproc service - // account](/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_cloud_dataproc) - // (also see [VM Data Plane - // identity](/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity)) - // used by Dataproc cluster VM instances to access Google Cloud Platform - // services. - // - // If not specified, the - // [Compute Engine default service - // account](/compute/docs/access/service-accounts#default_service_account) - // is used. - string service_account = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The URIs of service account scopes to be included in - // Compute Engine instances. The following base set of scopes is always - // included: - // - // * https://www.googleapis.com/auth/cloud.useraccounts.readonly - // * https://www.googleapis.com/auth/devstorage.read_write - // * https://www.googleapis.com/auth/logging.write - // - // If no scopes are specified, the following defaults are also provided: - // - // * https://www.googleapis.com/auth/bigquery - // * https://www.googleapis.com/auth/bigtable.admin.table - // * https://www.googleapis.com/auth/bigtable.data - // * https://www.googleapis.com/auth/devstorage.full_control - repeated string service_account_scopes = 3 [(google.api.field_behavior) = OPTIONAL]; - - // The Compute Engine tags to add to all instances (see - // [Tagging instances](/compute/docs/label-or-tag-resources#tags)). 
- repeated string tags = 4; - - // The Compute Engine metadata entries to add to all instances (see - // [Project and instance - // metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)). - map metadata = 5; -} - -// Optional. The config settings for Compute Engine resources in -// an instance group, such as a master or worker group. -message InstanceGroupConfig { - // Optional. The number of VM instances in the instance group. - // For master instance groups, must be set to 1. - int32 num_instances = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. The list of instance names. Dataproc derives the names - // from `cluster_name`, `num_instances`, and the instance group. - repeated string instance_names = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. The Compute Engine image resource used for cluster - // instances. It can be specified or may be inferred from - // `SoftwareConfig.image_version`. - string image_uri = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine machine type used for cluster instances. - // - // A full URL, partial URI, or short name are valid. Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` - // * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` - // * `n1-standard-2` - // - // **Auto Zone Exception**: If you are using the Dataproc - // [Auto Zone - // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) - // feature, you must use the short name of the machine type - // resource, for example, `n1-standard-2`. - string machine_type_uri = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Disk option config settings. - DiskConfig disk_config = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Specifies that this instance group contains preemptible - // instances. - bool is_preemptible = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. The config for Compute Engine Instance Group - // Manager that manages this group. - // This is only used for preemptible instance groups. - ManagedGroupConfig managed_group_config = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. The Compute Engine accelerator configuration for these - // instances. - repeated AcceleratorConfig accelerators = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Specifies the minimum cpu platform for the Instance Group. - // See [Dataproc→Minimum CPU Platform] - // (/dataproc/docs/concepts/compute/dataproc-min-cpu). - string min_cpu_platform = 9 [(google.api.field_behavior) = OPTIONAL]; -} - -// Specifies the resources used to actively manage an instance group. -message ManagedGroupConfig { - // Output only. The name of the Instance Template used for the Managed - // Instance Group. - string instance_template_name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The name of the Instance Group Manager for this group. - string instance_group_manager_name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Specifies the type and number of accelerator cards attached to the instances -// of an instance. See [GPUs on Compute Engine](/compute/docs/gpus/). -message AcceleratorConfig { - // Full URL, partial URI, or short name of the accelerator type resource to - // expose to this instance. 
See - // [Compute Engine - // AcceleratorTypes](/compute/docs/reference/beta/acceleratorTypes). - // - // Examples: - // - // * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` - // * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` - // * `nvidia-tesla-k80` - // - // **Auto Zone Exception**: If you are using the Dataproc - // [Auto Zone - // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) - // feature, you must use the short name of the accelerator type - // resource, for example, `nvidia-tesla-k80`. - string accelerator_type_uri = 1; - - // The number of the accelerator cards of this type exposed to this instance. - int32 accelerator_count = 2; -} - -// Specifies the config of disk options for a group of VM instances. -message DiskConfig { - // Optional. Type of the boot disk (default is "pd-standard"). - // Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or - // "pd-standard" (Persistent Disk Hard Disk Drive). - string boot_disk_type = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Size in GB of the boot disk (default is 500GB). - int32 boot_disk_size_gb = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Number of attached SSDs, from 0 to 4 (default is 0). - // If SSDs are not attached, the boot disk is used to store runtime logs and - // [HDFS](https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. - // If one or more SSDs are attached, this runtime bulk - // data is spread across them, and the boot disk contains only basic - // config and installed binaries. - int32 num_local_ssds = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// Specifies an executable to run on a fully configured node and a -// timeout period for executable completion. -message NodeInitializationAction { - // Required. Cloud Storage URI of executable file. - string executable_file = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Amount of time executable has to complete. Default is - // 10 minutes. Cluster creation fails with an explanatory error message (the - // name of the executable that caused the error and the exceeded timeout - // period) if the executable is not completed at end of the timeout period. - google.protobuf.Duration execution_timeout = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// The status of a cluster and its instances. -message ClusterStatus { - // The cluster state. - enum State { - // The cluster state is unknown. - UNKNOWN = 0; - - // The cluster is being created and set up. It is not ready for use. - CREATING = 1; - - // The cluster is currently running and healthy. It is ready for use. - RUNNING = 2; - - // The cluster encountered an error. It is not ready for use. - ERROR = 3; - - // The cluster is being deleted. It cannot be used. - DELETING = 4; - - // The cluster is being updated. It continues to accept and process jobs. - UPDATING = 5; - } - - // The cluster substate. - enum Substate { - // The cluster substate is unknown. - UNSPECIFIED = 0; - - // The cluster is known to be in an unhealthy state - // (for example, critical daemons are not running or HDFS capacity is - // exhausted). - // - // Applies to RUNNING state. - UNHEALTHY = 1; - - // The agent-reported status is out of date (may occur if - // Dataproc loses communication with Agent). - // - // Applies to RUNNING state. - STALE_STATUS = 2; - } - - // Output only. The cluster's state. 
- State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. Output only. Details of cluster's state. - string detail = 2 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = OPTIONAL - ]; - - // Output only. Time when this state was entered. - google.protobuf.Timestamp state_start_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Additional state information that includes - // status reported by the agent. - Substate substate = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Security related configuration, including Kerberos. -message SecurityConfig { - // Kerberos related configuration. - KerberosConfig kerberos_config = 1; -} - -// Specifies Kerberos related configuration. -message KerberosConfig { - // Optional. Flag to indicate whether to Kerberize the cluster. - bool enable_kerberos = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The Cloud Storage URI of a KMS encrypted file containing the root - // principal password. - string root_principal_password_uri = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The uri of the KMS key used to encrypt various sensitive - // files. - string kms_key_uri = 3 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The Cloud Storage URI of the keystore file used for SSL - // encryption. If not provided, Dataproc will provide a self-signed - // certificate. - string keystore_uri = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of the truststore file used for SSL - // encryption. If not provided, Dataproc will provide a self-signed - // certificate. - string truststore_uri = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the - // password to the user provided keystore. For the self-signed certificate, - // this password is generated by Dataproc. - string keystore_password_uri = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the - // password to the user provided key. For the self-signed certificate, this - // password is generated by Dataproc. - string key_password_uri = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the - // password to the user provided truststore. For the self-signed certificate, - // this password is generated by Dataproc. - string truststore_password_uri = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The remote realm the Dataproc on-cluster KDC will trust, should - // the user enable cross realm trust. - string cross_realm_trust_realm = 9 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The KDC (IP or hostname) for the remote trusted realm in a cross - // realm trust relationship. - string cross_realm_trust_kdc = 10 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The admin server (IP or hostname) for the remote trusted realm in - // a cross realm trust relationship. - string cross_realm_trust_admin_server = 11 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the - // shared password between the on-cluster Kerberos realm and the remote - // trusted realm, in a cross realm trust relationship. - string cross_realm_trust_shared_password_uri = 12 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. 
The Cloud Storage URI of a KMS encrypted file containing the - // master key of the KDC database. - string kdc_db_key_uri = 13 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The lifetime of the ticket granting ticket, in hours. - // If not specified, or user specifies 0, then default value 10 - // will be used. - int32 tgt_lifetime_hours = 14 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The name of the on-cluster Kerberos realm. - // If not specified, the uppercased domain of hostnames will be the realm. - string realm = 15 [(google.api.field_behavior) = OPTIONAL]; -} - -// Specifies the selection and config of software inside the cluster. -message SoftwareConfig { - // Optional. The version of software inside the cluster. It must be one of the - // supported [Dataproc - // Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions), - // such as "1.2" (including a subminor version, such as "1.2.29"), or the - // ["preview" - // version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). - // If unspecified, it defaults to the latest Debian version. - string image_version = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The properties to set on daemon config files. - // - // Property keys are specified in `prefix:property` format, for example - // `core:hadoop.tmp.dir`. The following are supported prefixes - // and their mappings: - // - // * capacity-scheduler: `capacity-scheduler.xml` - // * core: `core-site.xml` - // * distcp: `distcp-default.xml` - // * hdfs: `hdfs-site.xml` - // * hive: `hive-site.xml` - // * mapred: `mapred-site.xml` - // * pig: `pig.properties` - // * spark: `spark-defaults.conf` - // * yarn: `yarn-site.xml` - // - // For more information, see - // [Cluster properties](/dataproc/docs/concepts/cluster-properties). - map properties = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The set of components to activate on the cluster. - repeated Component optional_components = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// Contains cluster daemon metrics, such as HDFS and YARN stats. -// -// **Beta Feature**: This report is available for testing purposes only. It may -// be changed before final release. -message ClusterMetrics { - // The HDFS metrics. - map hdfs_metrics = 1; - - // The YARN metrics. - map yarn_metrics = 2; -} - -// A request to create a cluster. -message CreateClusterRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster to create. - Cluster cluster = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A unique id used to identify the request. If the server - // receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests with the same - // id, then the second request will be ignored and the - // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend - // is returned. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. 
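Putting the messages above together, creating a cluster amounts to a project ID, a region, a Cluster, and (optionally) an idempotency UUID. A minimal sketch using the google.cloud.dataproc_v1 GAPIC client whose sources this diff removes from the monorepo; the project, region, and cluster names are placeholders, and the default API endpoint is assumed.

import uuid

from google.cloud import dataproc_v1

client = dataproc_v1.ClusterControllerClient()

cluster = {
    "project_id": "example-project",
    "cluster_name": "example-cluster",
    "config": {
        "gce_cluster_config": {"zone_uri": "us-central1-f"},
        "master_config": {"num_instances": 1, "machine_type_uri": "n1-standard-2"},
        "worker_config": {
            "num_instances": 2,
            "machine_type_uri": "n1-standard-2",
            "disk_config": {"boot_disk_type": "pd-standard", "boot_disk_size_gb": 500},
        },
        # prefix:property form, as documented for SoftwareConfig.properties.
        "software_config": {"properties": {"spark:spark.executor.memory": "4g"}},
    },
}

# request_id makes the call safe to retry: a second request carrying the same
# UUID returns the original long-running operation instead of creating a
# duplicate cluster.
operation = client.create_cluster(
    "example-project", "us-central1", cluster, request_id=str(uuid.uuid4())
)
created = operation.result()  # blocks until the cluster reaches RUNNING or fails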
- string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to update a cluster. -message UpdateClusterRequest { - // Required. The ID of the Google Cloud Platform project the - // cluster belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 5 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. - string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The changes to the cluster. - Cluster cluster = 3 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Timeout for graceful YARN decomissioning. Graceful - // decommissioning allows removing nodes from the cluster without - // interrupting jobs in progress. Timeout specifies how long to wait for jobs - // in progress to finish before forcefully removing nodes (and potentially - // interrupting jobs). Default timeout is 0 (for forceful decommission), and - // the maximum allowed timeout is 1 day. - // - // Only supported on Dataproc image versions 1.2 and higher. - google.protobuf.Duration graceful_decommission_timeout = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Specifies the path, relative to `Cluster`, of - // the field to update. For example, to change the number of workers - // in a cluster to 5, the `update_mask` parameter would be - // specified as `config.worker_config.num_instances`, - // and the `PATCH` request body would specify the new value, as follows: - // - // { - // "config":{ - // "workerConfig":{ - // "numInstances":"5" - // } - // } - // } - // Similarly, to change the number of preemptible workers in a cluster to 5, - // the `update_mask` parameter would be - // `config.secondary_worker_config.num_instances`, and the `PATCH` request - // body would be set as follows: - // - // { - // "config":{ - // "secondaryWorkerConfig":{ - // "numInstances":"5" - // } - // } - // } - // Note: Currently, only the following fields can be updated: - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - //
- //  Mask                                           Purpose
- //  ---------------------------------------------  ------------------------------------------------
- //  labels                                         Update labels
- //  config.worker_config.num_instances             Resize primary worker group
- //  config.secondary_worker_config.num_instances   Resize secondary worker group
- //  config.autoscaling_config.policy_uri           Use, stop using, or change autoscaling policies
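As a concrete instance of the config.worker_config.num_instances row above, resizing the primary worker group names that path in the update mask and supplies only that field in the partial Cluster body. A minimal sketch, again assuming the google.cloud.dataproc_v1 GAPIC client and placeholder project, region, and cluster names.

from google.cloud import dataproc_v1

client = dataproc_v1.ClusterControllerClient()

operation = client.update_cluster(
    project_id="example-project",
    region="us-central1",
    cluster_name="example-cluster",
    # Only the path listed in update_mask is read from this partial Cluster.
    cluster={"config": {"worker_config": {"num_instances": 5}}},
    update_mask={"paths": ["config.worker_config.num_instances"]},
    # Optional: give in-flight YARN work up to 10 minutes before nodes are removed.
    graceful_decommission_timeout={"seconds": 600},
)
operation.result()  # blocks until the resize completes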
- google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A unique id used to identify the request. If the server - // receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same - // id, then the second request will be ignored and the - // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the - // backend is returned. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 7 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to delete a cluster. -message DeleteClusterRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. - string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Specifying the `cluster_uuid` means the RPC should fail - // (with error NOT_FOUND) if cluster with specified UUID does not exist. - string cluster_uuid = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A unique id used to identify the request. If the server - // receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same - // id, then the second request will be ignored and the - // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the - // backend is returned. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Request to get the resource representation for a cluster in a project. -message GetClusterRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. - string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to list the clusters in a project. -message ListClustersRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 4 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A filter constraining the clusters to list. Filters are - // case-sensitive and have the following syntax: - // - // field = value [AND [field = value]] ... - // - // where **field** is one of `status.state`, `clusterName`, or `labels.[KEY]`, - // and `[KEY]` is a label key. **value** can be `*` to match all values. - // `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, - // `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. 
`ACTIVE` - // contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` - // contains the `DELETING` and `ERROR` states. - // `clusterName` is the name of the cluster provided at creation time. - // Only the logical `AND` operator is supported; space-separated items are - // treated as having an implicit `AND` operator. - // - // Example filter: - // - // status.state = ACTIVE AND clusterName = mycluster - // AND labels.env = staging AND labels.starred = * - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The standard List page size. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The standard List page token. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// The list of all clusters in a project. -message ListClustersResponse { - // Output only. The clusters in the project. - repeated Cluster clusters = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. This token is included in the response if there are more - // results to fetch. To fetch additional results, provide this value as the - // `page_token` in a subsequent `ListClustersRequest`. - string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A request to collect cluster diagnostic information. -message DiagnoseClusterRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. - string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// The location of diagnostic output. -message DiagnoseClusterResults { - // Output only. The Cloud Storage URI of the diagnostic output. - // The output report is a plain text file with a summary of collected - // diagnostics. - string output_uri = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; -} diff --git a/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2.py deleted file mode 100644 index b4c0aa2b6cc4..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2.py +++ /dev/null @@ -1,3885 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/dataproc_v1/proto/clusters.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.cloud.dataproc_v1.proto import ( - operations_pb2 as google_dot_cloud_dot_dataproc__v1_dot_proto_dot_operations__pb2, -) -from google.cloud.dataproc_v1.proto import ( - shared_pb2 as google_dot_cloud_dot_dataproc__v1_dot_proto_dot_shared__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/dataproc_v1/proto/clusters.proto", - package="google.cloud.dataproc.v1", - syntax="proto3", - serialized_options=_b( - "\n\034com.google.cloud.dataproc.v1B\rClustersProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" - ), - serialized_pb=_b( - '\n-google/cloud/dataproc_v1/proto/clusters.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a/google/cloud/dataproc_v1/proto/operations.proto\x1a+google/cloud/dataproc_v1/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc8\x03\n\x07\x43luster\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12<\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfigB\x03\xe0\x41\x02\x12\x42\n\x06labels\x18\x08 \x03(\x0b\x32-.google.cloud.dataproc.v1.Cluster.LabelsEntryB\x03\xe0\x41\x01\x12<\n\x06status\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x44\n\x0estatus_history\x18\x07 \x03(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x07metrics\x18\t \x01(\x0b\x32(.google.cloud.dataproc.v1.ClusterMetrics\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xe6\x05\n\rClusterConfig\x12\x1a\n\rconfig_bucket\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12K\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32*.google.cloud.dataproc.v1.GceClusterConfigB\x03\xe0\x41\x01\x12I\n\rmaster_config\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12I\n\rworker_config\x18\n \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12S\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsoftware_config\x18\r 
\x01(\x0b\x32(.google.cloud.dataproc.v1.SoftwareConfigB\x03\xe0\x41\x01\x12W\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x32.google.cloud.dataproc.v1.NodeInitializationActionB\x03\xe0\x41\x01\x12J\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32*.google.cloud.dataproc.v1.EncryptionConfigB\x03\xe0\x41\x01\x12L\n\x12\x61utoscaling_config\x18\x12 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsecurity_config\x18\x10 \x01(\x0b\x32(.google.cloud.dataproc.v1.SecurityConfigB\x03\xe0\x41\x01",\n\x11\x41utoscalingConfig\x12\x17\n\npolicy_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01"4\n\x10\x45ncryptionConfig\x12 \n\x13gce_pd_kms_key_name\x18\x01 \x01(\tB\x03\xe0\x41\x01"\xcd\x02\n\x10GceClusterConfig\x12\x15\n\x08zone_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0bnetwork_uri\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0esubnetwork_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10internal_ip_only\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1c\n\x0fservice_account\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12#\n\x16service_account_scopes\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x03\n\x13InstanceGroupConfig\x12\x1a\n\rnum_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0einstance_names\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x16\n\timage_uri\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10machine_type_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12>\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.DiskConfigB\x03\xe0\x41\x01\x12\x1b\n\x0eis_preemptible\x18\x06 \x01(\x08\x42\x03\xe0\x41\x01\x12O\n\x14managed_group_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1.ManagedGroupConfigB\x03\xe0\x41\x03\x12\x46\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32+.google.cloud.dataproc.v1.AcceleratorConfigB\x03\xe0\x41\x01\x12\x1d\n\x10min_cpu_platform\x18\t \x01(\tB\x03\xe0\x41\x01"c\n\x12ManagedGroupConfig\x12#\n\x16instance_template_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12(\n\x1binstance_group_manager_name\x18\x02 \x01(\tB\x03\xe0\x41\x03"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"f\n\nDiskConfig\x12\x1b\n\x0e\x62oot_disk_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0enum_local_ssds\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01"s\n\x18NodeInitializationAction\x12\x1c\n\x0f\x65xecutable_file\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\x84\x03\n\rClusterStatus\x12\x41\n\x05state\x18\x01 \x01(\x0e\x32-.google.cloud.dataproc.v1.ClusterStatus.StateB\x03\xe0\x41\x03\x12\x16\n\x06\x64\x65tail\x18\x02 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x01\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12G\n\x08substate\x18\x04 
\x01(\x0e\x32\x30.google.cloud.dataproc.v1.ClusterStatus.SubstateB\x03\xe0\x41\x03"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"S\n\x0eSecurityConfig\x12\x41\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32(.google.cloud.dataproc.v1.KerberosConfig"\x90\x04\n\x0eKerberosConfig\x12\x1c\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12(\n\x1broot_principal_password_uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bkms_key_uri\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0ckeystore_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0etruststore_uri\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12"\n\x15keystore_password_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10key_password_uri\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17truststore_password_uri\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x63ross_realm_trust_realm\x18\t \x01(\tB\x03\xe0\x41\x01\x12"\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\tB\x03\xe0\x41\x01\x12+\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\tB\x03\xe0\x41\x01\x12\x32\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0ekdc_db_key_uri\x18\r \x01(\tB\x03\xe0\x41\x01\x12\x1f\n\x12tgt_lifetime_hours\x18\x0e \x01(\x05\x42\x03\xe0\x41\x01\x12\x12\n\x05realm\x18\x0f \x01(\tB\x03\xe0\x41\x01"\xf9\x01\n\x0eSoftwareConfig\x12\x1a\n\rimage_version\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12Q\n\nproperties\x18\x02 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntryB\x03\xe0\x41\x01\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32#.google.cloud.dataproc.v1.ComponentB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x02\n\x0e\x43lusterMetrics\x12O\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry\x12O\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x96\x01\n\x14\x43reateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x02 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"\xae\x02\n\x14UpdateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x03 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x45\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x07 \x01(\tB\x03\xe0\x41\x01"\x93\x01\n\x14\x44\x65leteClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x04 
\x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\\\n\x11GetClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x89\x01\n\x13ListClustersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"n\n\x14ListClustersResponse\x12\x38\n\x08\x63lusters\x18\x01 \x03(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"a\n\x16\x44iagnoseClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"1\n\x16\x44iagnoseClusterResults\x12\x17\n\noutput_uri\x18\x01 \x01(\tB\x03\xe0\x41\x03\x32\xe3\x0c\n\x11\x43lusterController\x12\x80\x02\n\rCreateCluster\x12..google.cloud.dataproc.v1.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\x82\xd3\xe4\x93\x02>"3/v1/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\xda\x41\x19project_id,region,cluster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xa8\x02\n\rUpdateCluster\x12..google.cloud.dataproc.v1.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"\xc7\x01\x82\xd3\xe4\x93\x02M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\xda\x41\x32project_id,region,cluster_name,cluster,update_mask\x12\x99\x02\n\rDeleteCluster\x12..google.cloud.dataproc.v1.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"\xb8\x01\x82\xd3\xe4\x93\x02\x44*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\xca\x41J\n\x15google.protobuf.Empty\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xc9\x01\n\nGetCluster\x12+.google.cloud.dataproc.v1.GetClusterRequest\x1a!.google.cloud.dataproc.v1.Cluster"k\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\x12\xd9\x01\n\x0cListClusters\x12-.google.cloud.dataproc.v1.ListClustersRequest\x1a..google.cloud.dataproc.v1.ListClustersResponse"j\x82\xd3\xe4\x93\x02\x35\x12\x33/v1/projects/{project_id}/regions/{region}/clusters\xda\x41\x11project_id,region\xda\x41\x18project_id,region,filter\x12\x8e\x02\n\x0f\x44iagnoseCluster\x12\x30.google.cloud.dataproc.v1.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"\xa9\x01\x82\xd3\xe4\x93\x02P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*\xda\x41\x1eproject_id,region,cluster_name\xca\x41/\n\x15google.protobuf.Empty\x12\x16\x44iagnoseClusterResults\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBq\n\x1c\x63om.google.cloud.dataproc.v1B\rClustersProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_cloud_dot_dataproc__v1_dot_proto_dot_operations__pb2.DESCRIPTOR, - 
google_dot_cloud_dot_dataproc__v1_dot_proto_dot_shared__pb2.DESCRIPTOR, - google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - -_CLUSTERSTATUS_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.cloud.dataproc.v1.ClusterStatus.State", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="UNKNOWN", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CREATING", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ERROR", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DELETING", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="UPDATING", index=5, number=5, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3087, - serialized_end=3173, -) -_sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_STATE) - -_CLUSTERSTATUS_SUBSTATE = _descriptor.EnumDescriptor( - name="Substate", - full_name="google.cloud.dataproc.v1.ClusterStatus.Substate", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="UNSPECIFIED", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="UNHEALTHY", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="STALE_STATUS", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3175, - serialized_end=3235, -) -_sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_SUBSTATE) - - -_CLUSTER_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.cloud.dataproc.v1.Cluster.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.Cluster.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.Cluster.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=805, - serialized_end=850, -) - -_CLUSTER = _descriptor.Descriptor( - name="Cluster", - full_name="google.cloud.dataproc.v1.Cluster", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1.Cluster.project_id", - index=0, - number=1, - type=9, - 
cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1.Cluster.cluster_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="config", - full_name="google.cloud.dataproc.v1.Cluster.config", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.cloud.dataproc.v1.Cluster.labels", - index=3, - number=8, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status", - full_name="google.cloud.dataproc.v1.Cluster.status", - index=4, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status_history", - full_name="google.cloud.dataproc.v1.Cluster.status_history", - index=5, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_uuid", - full_name="google.cloud.dataproc.v1.Cluster.cluster_uuid", - index=6, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metrics", - full_name="google.cloud.dataproc.v1.Cluster.metrics", - index=7, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_CLUSTER_LABELSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=394, - serialized_end=850, -) - - -_CLUSTERCONFIG = _descriptor.Descriptor( - name="ClusterConfig", - full_name="google.cloud.dataproc.v1.ClusterConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="config_bucket", - full_name="google.cloud.dataproc.v1.ClusterConfig.config_bucket", - 
index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="gce_cluster_config", - full_name="google.cloud.dataproc.v1.ClusterConfig.gce_cluster_config", - index=1, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="master_config", - full_name="google.cloud.dataproc.v1.ClusterConfig.master_config", - index=2, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="worker_config", - full_name="google.cloud.dataproc.v1.ClusterConfig.worker_config", - index=3, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="secondary_worker_config", - full_name="google.cloud.dataproc.v1.ClusterConfig.secondary_worker_config", - index=4, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="software_config", - full_name="google.cloud.dataproc.v1.ClusterConfig.software_config", - index=5, - number=13, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="initialization_actions", - full_name="google.cloud.dataproc.v1.ClusterConfig.initialization_actions", - index=6, - number=11, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="encryption_config", - full_name="google.cloud.dataproc.v1.ClusterConfig.encryption_config", - index=7, - number=15, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="autoscaling_config", - full_name="google.cloud.dataproc.v1.ClusterConfig.autoscaling_config", - index=8, - number=18, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="security_config", - full_name="google.cloud.dataproc.v1.ClusterConfig.security_config", - index=9, - number=16, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=853, - serialized_end=1595, -) - - -_AUTOSCALINGCONFIG = _descriptor.Descriptor( - name="AutoscalingConfig", - full_name="google.cloud.dataproc.v1.AutoscalingConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="policy_uri", - full_name="google.cloud.dataproc.v1.AutoscalingConfig.policy_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1597, - serialized_end=1641, -) - - -_ENCRYPTIONCONFIG = _descriptor.Descriptor( - name="EncryptionConfig", - full_name="google.cloud.dataproc.v1.EncryptionConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="gce_pd_kms_key_name", - full_name="google.cloud.dataproc.v1.EncryptionConfig.gce_pd_kms_key_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1643, - serialized_end=1695, -) - - -_GCECLUSTERCONFIG_METADATAENTRY = _descriptor.Descriptor( - name="MetadataEntry", - full_name="google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=1984, - serialized_end=2031, -) - -_GCECLUSTERCONFIG = _descriptor.Descriptor( - name="GceClusterConfig", - full_name="google.cloud.dataproc.v1.GceClusterConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="zone_uri", - full_name="google.cloud.dataproc.v1.GceClusterConfig.zone_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="network_uri", - full_name="google.cloud.dataproc.v1.GceClusterConfig.network_uri", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="subnetwork_uri", - full_name="google.cloud.dataproc.v1.GceClusterConfig.subnetwork_uri", - index=2, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="internal_ip_only", - full_name="google.cloud.dataproc.v1.GceClusterConfig.internal_ip_only", - index=3, - number=7, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_account", - full_name="google.cloud.dataproc.v1.GceClusterConfig.service_account", - index=4, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_account_scopes", - full_name="google.cloud.dataproc.v1.GceClusterConfig.service_account_scopes", - index=5, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="tags", - full_name="google.cloud.dataproc.v1.GceClusterConfig.tags", - index=6, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metadata", - full_name="google.cloud.dataproc.v1.GceClusterConfig.metadata", - index=7, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), 
- ], - extensions=[], - nested_types=[_GCECLUSTERCONFIG_METADATAENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1698, - serialized_end=2031, -) - - -_INSTANCEGROUPCONFIG = _descriptor.Descriptor( - name="InstanceGroupConfig", - full_name="google.cloud.dataproc.v1.InstanceGroupConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="num_instances", - full_name="google.cloud.dataproc.v1.InstanceGroupConfig.num_instances", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="instance_names", - full_name="google.cloud.dataproc.v1.InstanceGroupConfig.instance_names", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="image_uri", - full_name="google.cloud.dataproc.v1.InstanceGroupConfig.image_uri", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="machine_type_uri", - full_name="google.cloud.dataproc.v1.InstanceGroupConfig.machine_type_uri", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="disk_config", - full_name="google.cloud.dataproc.v1.InstanceGroupConfig.disk_config", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="is_preemptible", - full_name="google.cloud.dataproc.v1.InstanceGroupConfig.is_preemptible", - index=5, - number=6, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="managed_group_config", - full_name="google.cloud.dataproc.v1.InstanceGroupConfig.managed_group_config", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="accelerators", - full_name="google.cloud.dataproc.v1.InstanceGroupConfig.accelerators", - index=7, - number=8, - type=11, - cpp_type=10, - label=3, - 
has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="min_cpu_platform", - full_name="google.cloud.dataproc.v1.InstanceGroupConfig.min_cpu_platform", - index=8, - number=9, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2034, - serialized_end=2444, -) - - -_MANAGEDGROUPCONFIG = _descriptor.Descriptor( - name="ManagedGroupConfig", - full_name="google.cloud.dataproc.v1.ManagedGroupConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="instance_template_name", - full_name="google.cloud.dataproc.v1.ManagedGroupConfig.instance_template_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="instance_group_manager_name", - full_name="google.cloud.dataproc.v1.ManagedGroupConfig.instance_group_manager_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2446, - serialized_end=2545, -) - - -_ACCELERATORCONFIG = _descriptor.Descriptor( - name="AcceleratorConfig", - full_name="google.cloud.dataproc.v1.AcceleratorConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="accelerator_type_uri", - full_name="google.cloud.dataproc.v1.AcceleratorConfig.accelerator_type_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="accelerator_count", - full_name="google.cloud.dataproc.v1.AcceleratorConfig.accelerator_count", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2547, - serialized_end=2623, -) - - -_DISKCONFIG = _descriptor.Descriptor( - name="DiskConfig", - full_name="google.cloud.dataproc.v1.DiskConfig", - 
filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="boot_disk_type", - full_name="google.cloud.dataproc.v1.DiskConfig.boot_disk_type", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="boot_disk_size_gb", - full_name="google.cloud.dataproc.v1.DiskConfig.boot_disk_size_gb", - index=1, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="num_local_ssds", - full_name="google.cloud.dataproc.v1.DiskConfig.num_local_ssds", - index=2, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2625, - serialized_end=2727, -) - - -_NODEINITIALIZATIONACTION = _descriptor.Descriptor( - name="NodeInitializationAction", - full_name="google.cloud.dataproc.v1.NodeInitializationAction", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="executable_file", - full_name="google.cloud.dataproc.v1.NodeInitializationAction.executable_file", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="execution_timeout", - full_name="google.cloud.dataproc.v1.NodeInitializationAction.execution_timeout", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2729, - serialized_end=2844, -) - - -_CLUSTERSTATUS = _descriptor.Descriptor( - name="ClusterStatus", - full_name="google.cloud.dataproc.v1.ClusterStatus", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.dataproc.v1.ClusterStatus.state", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="detail", - full_name="google.cloud.dataproc.v1.ClusterStatus.detail", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="state_start_time", - full_name="google.cloud.dataproc.v1.ClusterStatus.state_start_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="substate", - full_name="google.cloud.dataproc.v1.ClusterStatus.substate", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_CLUSTERSTATUS_STATE, _CLUSTERSTATUS_SUBSTATE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2847, - serialized_end=3235, -) - - -_SECURITYCONFIG = _descriptor.Descriptor( - name="SecurityConfig", - full_name="google.cloud.dataproc.v1.SecurityConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="kerberos_config", - full_name="google.cloud.dataproc.v1.SecurityConfig.kerberos_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3237, - serialized_end=3320, -) - - -_KERBEROSCONFIG = _descriptor.Descriptor( - name="KerberosConfig", - full_name="google.cloud.dataproc.v1.KerberosConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="enable_kerberos", - full_name="google.cloud.dataproc.v1.KerberosConfig.enable_kerberos", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="root_principal_password_uri", - full_name="google.cloud.dataproc.v1.KerberosConfig.root_principal_password_uri", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="kms_key_uri", - full_name="google.cloud.dataproc.v1.KerberosConfig.kms_key_uri", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), 
- file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="keystore_uri", - full_name="google.cloud.dataproc.v1.KerberosConfig.keystore_uri", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="truststore_uri", - full_name="google.cloud.dataproc.v1.KerberosConfig.truststore_uri", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="keystore_password_uri", - full_name="google.cloud.dataproc.v1.KerberosConfig.keystore_password_uri", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="key_password_uri", - full_name="google.cloud.dataproc.v1.KerberosConfig.key_password_uri", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="truststore_password_uri", - full_name="google.cloud.dataproc.v1.KerberosConfig.truststore_password_uri", - index=7, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cross_realm_trust_realm", - full_name="google.cloud.dataproc.v1.KerberosConfig.cross_realm_trust_realm", - index=8, - number=9, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cross_realm_trust_kdc", - full_name="google.cloud.dataproc.v1.KerberosConfig.cross_realm_trust_kdc", - index=9, - number=10, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cross_realm_trust_admin_server", - full_name="google.cloud.dataproc.v1.KerberosConfig.cross_realm_trust_admin_server", - index=10, - number=11, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - 
name="cross_realm_trust_shared_password_uri", - full_name="google.cloud.dataproc.v1.KerberosConfig.cross_realm_trust_shared_password_uri", - index=11, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="kdc_db_key_uri", - full_name="google.cloud.dataproc.v1.KerberosConfig.kdc_db_key_uri", - index=12, - number=13, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="tgt_lifetime_hours", - full_name="google.cloud.dataproc.v1.KerberosConfig.tgt_lifetime_hours", - index=13, - number=14, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="realm", - full_name="google.cloud.dataproc.v1.KerberosConfig.realm", - index=14, - number=15, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3323, - serialized_end=3851, -) - - -_SOFTWARECONFIG_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - full_name="google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4054, - serialized_end=4103, -) - -_SOFTWARECONFIG = _descriptor.Descriptor( - name="SoftwareConfig", - full_name="google.cloud.dataproc.v1.SoftwareConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="image_version", - full_name="google.cloud.dataproc.v1.SoftwareConfig.image_version", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1.SoftwareConfig.properties", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="optional_components", - full_name="google.cloud.dataproc.v1.SoftwareConfig.optional_components", - index=2, - number=3, - type=14, - cpp_type=8, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_SOFTWARECONFIG_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3854, - serialized_end=4103, -) - - -_CLUSTERMETRICS_HDFSMETRICSENTRY = _descriptor.Descriptor( - name="HdfsMetricsEntry", - full_name="google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry.value", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4286, - serialized_end=4336, -) - -_CLUSTERMETRICS_YARNMETRICSENTRY = _descriptor.Descriptor( - name="YarnMetricsEntry", - full_name="google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry.value", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4338, - serialized_end=4388, -) - -_CLUSTERMETRICS = _descriptor.Descriptor( - name="ClusterMetrics", - full_name="google.cloud.dataproc.v1.ClusterMetrics", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="hdfs_metrics", - full_name="google.cloud.dataproc.v1.ClusterMetrics.hdfs_metrics", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="yarn_metrics", - full_name="google.cloud.dataproc.v1.ClusterMetrics.yarn_metrics", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_CLUSTERMETRICS_HDFSMETRICSENTRY, _CLUSTERMETRICS_YARNMETRICSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4106, - serialized_end=4388, -) - - -_CREATECLUSTERREQUEST = _descriptor.Descriptor( - name="CreateClusterRequest", - full_name="google.cloud.dataproc.v1.CreateClusterRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1.CreateClusterRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1.CreateClusterRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster", - full_name="google.cloud.dataproc.v1.CreateClusterRequest.cluster", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="request_id", - full_name="google.cloud.dataproc.v1.CreateClusterRequest.request_id", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4391, - 
serialized_end=4541, -) - - -_UPDATECLUSTERREQUEST = _descriptor.Descriptor( - name="UpdateClusterRequest", - full_name="google.cloud.dataproc.v1.UpdateClusterRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1.UpdateClusterRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1.UpdateClusterRequest.region", - index=1, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1.UpdateClusterRequest.cluster_name", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster", - full_name="google.cloud.dataproc.v1.UpdateClusterRequest.cluster", - index=3, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="graceful_decommission_timeout", - full_name="google.cloud.dataproc.v1.UpdateClusterRequest.graceful_decommission_timeout", - index=4, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.dataproc.v1.UpdateClusterRequest.update_mask", - index=5, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="request_id", - full_name="google.cloud.dataproc.v1.UpdateClusterRequest.request_id", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4544, - serialized_end=4846, -) - - -_DELETECLUSTERREQUEST = _descriptor.Descriptor( - name="DeleteClusterRequest", - full_name="google.cloud.dataproc.v1.DeleteClusterRequest", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1.DeleteClusterRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1.DeleteClusterRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1.DeleteClusterRequest.cluster_name", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_uuid", - full_name="google.cloud.dataproc.v1.DeleteClusterRequest.cluster_uuid", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="request_id", - full_name="google.cloud.dataproc.v1.DeleteClusterRequest.request_id", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4849, - serialized_end=4996, -) - - -_GETCLUSTERREQUEST = _descriptor.Descriptor( - name="GetClusterRequest", - full_name="google.cloud.dataproc.v1.GetClusterRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1.GetClusterRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1.GetClusterRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1.GetClusterRequest.cluster_name", - index=2, - number=2, - 
type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4998, - serialized_end=5090, -) - - -_LISTCLUSTERSREQUEST = _descriptor.Descriptor( - name="ListClustersRequest", - full_name="google.cloud.dataproc.v1.ListClustersRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1.ListClustersRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1.ListClustersRequest.region", - index=1, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.dataproc.v1.ListClustersRequest.filter", - index=2, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.dataproc.v1.ListClustersRequest.page_size", - index=3, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.dataproc.v1.ListClustersRequest.page_token", - index=4, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5093, - serialized_end=5230, -) - - -_LISTCLUSTERSRESPONSE = _descriptor.Descriptor( - name="ListClustersResponse", - full_name="google.cloud.dataproc.v1.ListClustersResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="clusters", - full_name="google.cloud.dataproc.v1.ListClustersResponse.clusters", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.dataproc.v1.ListClustersResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5232, - serialized_end=5342, -) - - -_DIAGNOSECLUSTERREQUEST = _descriptor.Descriptor( - name="DiagnoseClusterRequest", - full_name="google.cloud.dataproc.v1.DiagnoseClusterRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1.DiagnoseClusterRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1.DiagnoseClusterRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1.DiagnoseClusterRequest.cluster_name", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5344, - serialized_end=5441, -) - - -_DIAGNOSECLUSTERRESULTS = _descriptor.Descriptor( - name="DiagnoseClusterResults", - full_name="google.cloud.dataproc.v1.DiagnoseClusterResults", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="output_uri", - full_name="google.cloud.dataproc.v1.DiagnoseClusterResults.output_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5443, - serialized_end=5492, -) - -_CLUSTER_LABELSENTRY.containing_type = _CLUSTER -_CLUSTER.fields_by_name["config"].message_type = _CLUSTERCONFIG -_CLUSTER.fields_by_name["labels"].message_type = _CLUSTER_LABELSENTRY -_CLUSTER.fields_by_name["status"].message_type = _CLUSTERSTATUS 
-_CLUSTER.fields_by_name["status_history"].message_type = _CLUSTERSTATUS -_CLUSTER.fields_by_name["metrics"].message_type = _CLUSTERMETRICS -_CLUSTERCONFIG.fields_by_name["gce_cluster_config"].message_type = _GCECLUSTERCONFIG -_CLUSTERCONFIG.fields_by_name["master_config"].message_type = _INSTANCEGROUPCONFIG -_CLUSTERCONFIG.fields_by_name["worker_config"].message_type = _INSTANCEGROUPCONFIG -_CLUSTERCONFIG.fields_by_name[ - "secondary_worker_config" -].message_type = _INSTANCEGROUPCONFIG -_CLUSTERCONFIG.fields_by_name["software_config"].message_type = _SOFTWARECONFIG -_CLUSTERCONFIG.fields_by_name[ - "initialization_actions" -].message_type = _NODEINITIALIZATIONACTION -_CLUSTERCONFIG.fields_by_name["encryption_config"].message_type = _ENCRYPTIONCONFIG -_CLUSTERCONFIG.fields_by_name["autoscaling_config"].message_type = _AUTOSCALINGCONFIG -_CLUSTERCONFIG.fields_by_name["security_config"].message_type = _SECURITYCONFIG -_GCECLUSTERCONFIG_METADATAENTRY.containing_type = _GCECLUSTERCONFIG -_GCECLUSTERCONFIG.fields_by_name[ - "metadata" -].message_type = _GCECLUSTERCONFIG_METADATAENTRY -_INSTANCEGROUPCONFIG.fields_by_name["disk_config"].message_type = _DISKCONFIG -_INSTANCEGROUPCONFIG.fields_by_name[ - "managed_group_config" -].message_type = _MANAGEDGROUPCONFIG -_INSTANCEGROUPCONFIG.fields_by_name["accelerators"].message_type = _ACCELERATORCONFIG -_NODEINITIALIZATIONACTION.fields_by_name[ - "execution_timeout" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_CLUSTERSTATUS.fields_by_name["state"].enum_type = _CLUSTERSTATUS_STATE -_CLUSTERSTATUS.fields_by_name[ - "state_start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_CLUSTERSTATUS.fields_by_name["substate"].enum_type = _CLUSTERSTATUS_SUBSTATE -_CLUSTERSTATUS_STATE.containing_type = _CLUSTERSTATUS -_CLUSTERSTATUS_SUBSTATE.containing_type = _CLUSTERSTATUS -_SECURITYCONFIG.fields_by_name["kerberos_config"].message_type = _KERBEROSCONFIG -_SOFTWARECONFIG_PROPERTIESENTRY.containing_type = _SOFTWARECONFIG -_SOFTWARECONFIG.fields_by_name[ - "properties" -].message_type = _SOFTWARECONFIG_PROPERTIESENTRY -_SOFTWARECONFIG.fields_by_name[ - "optional_components" -].enum_type = google_dot_cloud_dot_dataproc__v1_dot_proto_dot_shared__pb2._COMPONENT -_CLUSTERMETRICS_HDFSMETRICSENTRY.containing_type = _CLUSTERMETRICS -_CLUSTERMETRICS_YARNMETRICSENTRY.containing_type = _CLUSTERMETRICS -_CLUSTERMETRICS.fields_by_name[ - "hdfs_metrics" -].message_type = _CLUSTERMETRICS_HDFSMETRICSENTRY -_CLUSTERMETRICS.fields_by_name[ - "yarn_metrics" -].message_type = _CLUSTERMETRICS_YARNMETRICSENTRY -_CREATECLUSTERREQUEST.fields_by_name["cluster"].message_type = _CLUSTER -_UPDATECLUSTERREQUEST.fields_by_name["cluster"].message_type = _CLUSTER -_UPDATECLUSTERREQUEST.fields_by_name[ - "graceful_decommission_timeout" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_UPDATECLUSTERREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTCLUSTERSRESPONSE.fields_by_name["clusters"].message_type = _CLUSTER -DESCRIPTOR.message_types_by_name["Cluster"] = _CLUSTER -DESCRIPTOR.message_types_by_name["ClusterConfig"] = _CLUSTERCONFIG -DESCRIPTOR.message_types_by_name["AutoscalingConfig"] = _AUTOSCALINGCONFIG -DESCRIPTOR.message_types_by_name["EncryptionConfig"] = _ENCRYPTIONCONFIG -DESCRIPTOR.message_types_by_name["GceClusterConfig"] = _GCECLUSTERCONFIG -DESCRIPTOR.message_types_by_name["InstanceGroupConfig"] = _INSTANCEGROUPCONFIG 
-DESCRIPTOR.message_types_by_name["ManagedGroupConfig"] = _MANAGEDGROUPCONFIG -DESCRIPTOR.message_types_by_name["AcceleratorConfig"] = _ACCELERATORCONFIG -DESCRIPTOR.message_types_by_name["DiskConfig"] = _DISKCONFIG -DESCRIPTOR.message_types_by_name["NodeInitializationAction"] = _NODEINITIALIZATIONACTION -DESCRIPTOR.message_types_by_name["ClusterStatus"] = _CLUSTERSTATUS -DESCRIPTOR.message_types_by_name["SecurityConfig"] = _SECURITYCONFIG -DESCRIPTOR.message_types_by_name["KerberosConfig"] = _KERBEROSCONFIG -DESCRIPTOR.message_types_by_name["SoftwareConfig"] = _SOFTWARECONFIG -DESCRIPTOR.message_types_by_name["ClusterMetrics"] = _CLUSTERMETRICS -DESCRIPTOR.message_types_by_name["CreateClusterRequest"] = _CREATECLUSTERREQUEST -DESCRIPTOR.message_types_by_name["UpdateClusterRequest"] = _UPDATECLUSTERREQUEST -DESCRIPTOR.message_types_by_name["DeleteClusterRequest"] = _DELETECLUSTERREQUEST -DESCRIPTOR.message_types_by_name["GetClusterRequest"] = _GETCLUSTERREQUEST -DESCRIPTOR.message_types_by_name["ListClustersRequest"] = _LISTCLUSTERSREQUEST -DESCRIPTOR.message_types_by_name["ListClustersResponse"] = _LISTCLUSTERSRESPONSE -DESCRIPTOR.message_types_by_name["DiagnoseClusterRequest"] = _DIAGNOSECLUSTERREQUEST -DESCRIPTOR.message_types_by_name["DiagnoseClusterResults"] = _DIAGNOSECLUSTERRESULTS -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Cluster = _reflection.GeneratedProtocolMessageType( - "Cluster", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTER_LABELSENTRY, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.Cluster.LabelsEntry) - ), - ), - DESCRIPTOR=_CLUSTER, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Describes the identifying information, config, and status - of a cluster of Compute Engine instances. - - - Attributes: - project_id: - Required. The Google Cloud Platform project ID that the - cluster belongs to. - cluster_name: - Required. The cluster name. Cluster names within a project - must be unique. Names of deleted clusters can be reused. - config: - Required. The cluster config. Note that Dataproc may set - default values, and values may change when clusters are - updated. - labels: - Optional. The labels to associate with this cluster. Label - **keys** must contain 1 to 63 characters, and must conform to - `RFC 1035 `__. Label - **values** may be empty, but, if present, must contain 1 to 63 - characters, and must conform to `RFC 1035 - `__. No more than 32 - labels can be associated with a cluster. - status: - Output only. Cluster status. - status_history: - Output only. The previous cluster status. - cluster_uuid: - Output only. A cluster UUID (Unique Universal Identifier). - Dataproc generates this value when it creates the cluster. - metrics: - Contains cluster daemon metrics such as HDFS and YARN stats. - **Beta Feature**: This report is available for testing - purposes only. It may be changed before final release. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.Cluster) - ), -) -_sym_db.RegisterMessage(Cluster) -_sym_db.RegisterMessage(Cluster.LabelsEntry) - -ClusterConfig = _reflection.GeneratedProtocolMessageType( - "ClusterConfig", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTERCONFIG, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""The cluster config. - - - Attributes: - config_bucket: - Optional. 
A Cloud Storage bucket used to stage job - dependencies, config files, and job driver console output. If - you do not specify a staging bucket, Cloud Dataproc will - determine a Cloud Storage location (US, ASIA, or EU) for your - cluster's staging bucket according to the Compute Engine zone - where your cluster is deployed, and then create and manage - this project-level, per-location bucket (see `Dataproc staging - bucket `__). - gce_cluster_config: - Optional. The shared Compute Engine config settings for all - instances in a cluster. - master_config: - Optional. The Compute Engine config settings for the master - instance in a cluster. - worker_config: - Optional. The Compute Engine config settings for worker - instances in a cluster. - secondary_worker_config: - Optional. The Compute Engine config settings for additional - worker instances in a cluster. - software_config: - Optional. The config settings for software inside the cluster. - initialization_actions: - Optional. Commands to execute on each node after config is - completed. By default, executables are run on master and all - worker nodes. You can test a node's ``role`` metadata to run - an executable on a master or worker node, as shown below using - ``curl`` (you can also use ``wget``): :: ROLE=$(curl -H - Metadata-Flavor:Google http://metadata/computeMetadata/v1/ - instance/attributes/dataproc-role) if [[ "${ROLE}" == - 'Master' ]]; then ... master specific actions ... - else ... worker specific actions ... fi - encryption_config: - Optional. Encryption settings for the cluster. - autoscaling_config: - Optional. Autoscaling config for the policy associated with - the cluster. Cluster does not autoscale if this field is - unset. - security_config: - Optional. Security settings for the cluster. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterConfig) - ), -) -_sym_db.RegisterMessage(ClusterConfig) - -AutoscalingConfig = _reflection.GeneratedProtocolMessageType( - "AutoscalingConfig", - (_message.Message,), - dict( - DESCRIPTOR=_AUTOSCALINGCONFIG, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Autoscaling Policy config associated with the cluster. - - - Attributes: - policy_uri: - Optional. The autoscaling policy used by the cluster. Only - resource names including projectid and location (region) are - valid. Examples: - ``https://www.googleapis.com/compute/v1/p - rojects/[project_id]/locations/[dataproc_region]/autoscalingPo - licies/[policy_id]`` - ``projects/[project_id]/locations/[dat - aproc_region]/autoscalingPolicies/[policy_id]`` Note that the - policy must be in the same project and Dataproc region. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.AutoscalingConfig) - ), -) -_sym_db.RegisterMessage(AutoscalingConfig) - -EncryptionConfig = _reflection.GeneratedProtocolMessageType( - "EncryptionConfig", - (_message.Message,), - dict( - DESCRIPTOR=_ENCRYPTIONCONFIG, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Encryption settings for the cluster. - - - Attributes: - gce_pd_kms_key_name: - Optional. The Cloud KMS key name to use for PD disk encryption - for all instances in the cluster. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.EncryptionConfig) - ), -) -_sym_db.RegisterMessage(EncryptionConfig) - -GceClusterConfig = _reflection.GeneratedProtocolMessageType( - "GceClusterConfig", - (_message.Message,), - dict( - MetadataEntry=_reflection.GeneratedProtocolMessageType( - "MetadataEntry", - (_message.Message,), - dict( - DESCRIPTOR=_GCECLUSTERCONFIG_METADATAENTRY, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry) - ), - ), - DESCRIPTOR=_GCECLUSTERCONFIG, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Common config settings for resources of Compute Engine - cluster instances, applicable to all instances in the cluster. - - - Attributes: - zone_uri: - Optional. The zone where the Compute Engine cluster will be - located. On a create request, it is required in the "global" - region. If omitted in a non-global Dataproc region, the - service will pick a zone in the corresponding Compute Engine - region. On a get request, zone will always be present. A full - URL, partial URI, or short name are valid. Examples: - ``htt - ps://www.googleapis.com/compute/v1/projects/[project_id]/zones - /[zone]`` - ``projects/[project_id]/zones/[zone]`` - ``us- - central1-f`` - network_uri: - Optional. The Compute Engine network to be used for machine - communications. Cannot be specified with subnetwork\_uri. If - neither ``network_uri`` nor ``subnetwork_uri`` is specified, - the "default" network of the project is used, if it exists. - Cannot be a "Custom Subnet Network" (see `Using Subnetworks - `__ for more information). A full - URL, partial URI, or short name are valid. Examples: - ``htt - ps://www.googleapis.com/compute/v1/projects/[project_id]/regio - ns/global/default`` - - ``projects/[project_id]/regions/global/default`` - - ``default`` - subnetwork_uri: - Optional. The Compute Engine subnetwork to be used for machine - communications. Cannot be specified with network\_uri. A full - URL, partial URI, or short name are valid. Examples: - ``htt - ps://www.googleapis.com/compute/v1/projects/[project_id]/regio - ns/us-east1/subnetworks/sub0`` - - ``projects/[project_id]/regions/us-east1/subnetworks/sub0`` - - ``sub0`` - internal_ip_only: - Optional. If true, all instances in the cluster will only have - internal IP addresses. By default, clusters are not restricted - to internal IP addresses, and will have ephemeral external IP - addresses assigned to each instance. This ``internal_ip_only`` - restriction can only be enabled for subnetwork enabled - networks, and all off-cluster dependencies must be configured - to be accessible without external IP addresses. - service_account: - Optional. The `Dataproc service account - `__ (also see `VM - Data Plane identity `__) used by - Dataproc cluster VM instances to access Google Cloud Platform - services. If not specified, the `Compute Engine default - service account `__ is used. - service_account_scopes: - Optional. The URIs of service account scopes to be included in - Compute Engine instances. 
The following base set of scopes is - always included: - - https://www.googleapis.com/auth/cloud.useraccounts.readonly - - https://www.googleapis.com/auth/devstorage.read\_write - - https://www.googleapis.com/auth/logging.write If no scopes - are specified, the following defaults are also provided: - - https://www.googleapis.com/auth/bigquery - - https://www.googleapis.com/auth/bigtable.admin.table - - https://www.googleapis.com/auth/bigtable.data - - https://www.googleapis.com/auth/devstorage.full\_control - tags: - The Compute Engine tags to add to all instances (see `Tagging - instances `__). - metadata: - The Compute Engine metadata entries to add to all instances - (see `Project and instance metadata - `__). - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.GceClusterConfig) - ), -) -_sym_db.RegisterMessage(GceClusterConfig) -_sym_db.RegisterMessage(GceClusterConfig.MetadataEntry) - -InstanceGroupConfig = _reflection.GeneratedProtocolMessageType( - "InstanceGroupConfig", - (_message.Message,), - dict( - DESCRIPTOR=_INSTANCEGROUPCONFIG, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Optional. The config settings for Compute Engine resources - in an instance group, such as a master or worker group. - - - Attributes: - num_instances: - Optional. The number of VM instances in the instance group. - For master instance groups, must be set to 1. - instance_names: - Output only. The list of instance names. Dataproc derives the - names from ``cluster_name``, ``num_instances``, and the - instance group. - image_uri: - Optional. The Compute Engine image resource used for cluster - instances. It can be specified or may be inferred from - ``SoftwareConfig.image_version``. - machine_type_uri: - Optional. The Compute Engine machine type used for cluster - instances. A full URL, partial URI, or short name are valid. - Examples: - ``https://www.googleapis.com/compute/v1/projects - /[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`` - - ``projects/[project_id]/zones/us- - east1-a/machineTypes/n1-standard-2`` - ``n1-standard-2`` - **Auto Zone Exception**: If you are using the Dataproc `Auto - Zone Placement `__ feature, you - must use the short name of the machine type resource, for - example, ``n1-standard-2``. - disk_config: - Optional. Disk option config settings. - is_preemptible: - Optional. Specifies that this instance group contains - preemptible instances. - managed_group_config: - Output only. The config for Compute Engine Instance Group - Manager that manages this group. This is only used for - preemptible instance groups. - accelerators: - Optional. The Compute Engine accelerator configuration for - these instances. - min_cpu_platform: - Optional. Specifies the minimum cpu platform for the Instance - Group. See [Dataproc→Minimum CPU Platform] - (/dataproc/docs/concepts/compute/dataproc-min-cpu). - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.InstanceGroupConfig) - ), -) -_sym_db.RegisterMessage(InstanceGroupConfig) - -ManagedGroupConfig = _reflection.GeneratedProtocolMessageType( - "ManagedGroupConfig", - (_message.Message,), - dict( - DESCRIPTOR=_MANAGEDGROUPCONFIG, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Specifies the resources used to actively manage an - instance group. - - - Attributes: - instance_template_name: - Output only. The name of the Instance Template used for the - Managed Instance Group. - instance_group_manager_name: - Output only. 
The name of the Instance Group Manager for this - group. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ManagedGroupConfig) - ), -) -_sym_db.RegisterMessage(ManagedGroupConfig) - -AcceleratorConfig = _reflection.GeneratedProtocolMessageType( - "AcceleratorConfig", - (_message.Message,), - dict( - DESCRIPTOR=_ACCELERATORCONFIG, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Specifies the type and number of accelerator cards - attached to the instances of an instance. See `GPUs on Compute - Engine `__. - - - Attributes: - accelerator_type_uri: - Full URL, partial URI, or short name of the accelerator type - resource to expose to this instance. See `Compute Engine - AcceleratorTypes - `__. Examples: - - ``https://www.googleapis.com/compute/beta/projects/[project - _id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`` - - ``projects/[project_id]/zones/us- - east1-a/acceleratorTypes/nvidia-tesla-k80`` - ``nvidia- - tesla-k80`` **Auto Zone Exception**: If you are using the - Dataproc `Auto Zone Placement - `__ feature, you must use the - short name of the accelerator type resource, for example, - ``nvidia-tesla-k80``. - accelerator_count: - The number of the accelerator cards of this type exposed to - this instance. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.AcceleratorConfig) - ), -) -_sym_db.RegisterMessage(AcceleratorConfig) - -DiskConfig = _reflection.GeneratedProtocolMessageType( - "DiskConfig", - (_message.Message,), - dict( - DESCRIPTOR=_DISKCONFIG, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Specifies the config of disk options for a group of VM - instances. - - - Attributes: - boot_disk_type: - Optional. Type of the boot disk (default is "pd-standard"). - Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or - "pd-standard" (Persistent Disk Hard Disk Drive). - boot_disk_size_gb: - Optional. Size in GB of the boot disk (default is 500GB). - num_local_ssds: - Optional. Number of attached SSDs, from 0 to 4 (default is 0). - If SSDs are not attached, the boot disk is used to store - runtime logs and `HDFS `__ data. If one or more SSDs are - attached, this runtime bulk data is spread across them, and - the boot disk contains only basic config and installed - binaries. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.DiskConfig) - ), -) -_sym_db.RegisterMessage(DiskConfig) - -NodeInitializationAction = _reflection.GeneratedProtocolMessageType( - "NodeInitializationAction", - (_message.Message,), - dict( - DESCRIPTOR=_NODEINITIALIZATIONACTION, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Specifies an executable to run on a fully configured node - and a timeout period for executable completion. - - - Attributes: - executable_file: - Required. Cloud Storage URI of executable file. - execution_timeout: - Optional. Amount of time executable has to complete. Default - is 10 minutes. Cluster creation fails with an explanatory - error message (the name of the executable that caused the - error and the exceeded timeout period) if the executable is - not completed at end of the timeout period. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.NodeInitializationAction) - ), -) -_sym_db.RegisterMessage(NodeInitializationAction) - -ClusterStatus = _reflection.GeneratedProtocolMessageType( - "ClusterStatus", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTERSTATUS, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""The status of a cluster and its instances. - - - Attributes: - state: - Output only. The cluster's state. - detail: - Optional. Output only. Details of cluster's state. - state_start_time: - Output only. Time when this state was entered. - substate: - Output only. Additional state information that includes status - reported by the agent. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterStatus) - ), -) -_sym_db.RegisterMessage(ClusterStatus) - -SecurityConfig = _reflection.GeneratedProtocolMessageType( - "SecurityConfig", - (_message.Message,), - dict( - DESCRIPTOR=_SECURITYCONFIG, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Security related configuration, including Kerberos. - - - Attributes: - kerberos_config: - Kerberos related configuration. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SecurityConfig) - ), -) -_sym_db.RegisterMessage(SecurityConfig) - -KerberosConfig = _reflection.GeneratedProtocolMessageType( - "KerberosConfig", - (_message.Message,), - dict( - DESCRIPTOR=_KERBEROSCONFIG, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Specifies Kerberos related configuration. - - - Attributes: - enable_kerberos: - Optional. Flag to indicate whether to Kerberize the cluster. - root_principal_password_uri: - Required. The Cloud Storage URI of a KMS encrypted file - containing the root principal password. - kms_key_uri: - Required. The uri of the KMS key used to encrypt various - sensitive files. - keystore_uri: - Optional. The Cloud Storage URI of the keystore file used for - SSL encryption. If not provided, Dataproc will provide a self- - signed certificate. - truststore_uri: - Optional. The Cloud Storage URI of the truststore file used - for SSL encryption. If not provided, Dataproc will provide a - self-signed certificate. - keystore_password_uri: - Optional. The Cloud Storage URI of a KMS encrypted file - containing the password to the user provided keystore. For the - self-signed certificate, this password is generated by - Dataproc. - key_password_uri: - Optional. The Cloud Storage URI of a KMS encrypted file - containing the password to the user provided key. For the - self-signed certificate, this password is generated by - Dataproc. - truststore_password_uri: - Optional. The Cloud Storage URI of a KMS encrypted file - containing the password to the user provided truststore. For - the self-signed certificate, this password is generated by - Dataproc. - cross_realm_trust_realm: - Optional. The remote realm the Dataproc on-cluster KDC will - trust, should the user enable cross realm trust. - cross_realm_trust_kdc: - Optional. The KDC (IP or hostname) for the remote trusted - realm in a cross realm trust relationship. - cross_realm_trust_admin_server: - Optional. The admin server (IP or hostname) for the remote - trusted realm in a cross realm trust relationship. - cross_realm_trust_shared_password_uri: - Optional. The Cloud Storage URI of a KMS encrypted file - containing the shared password between the on-cluster Kerberos - realm and the remote trusted realm, in a cross realm trust - relationship. 
- kdc_db_key_uri: - Optional. The Cloud Storage URI of a KMS encrypted file - containing the master key of the KDC database. - tgt_lifetime_hours: - Optional. The lifetime of the ticket granting ticket, in - hours. If not specified, or user specifies 0, then default - value 10 will be used. - realm: - Optional. The name of the on-cluster Kerberos realm. If not - specified, the uppercased domain of hostnames will be the - realm. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.KerberosConfig) - ), -) -_sym_db.RegisterMessage(KerberosConfig) - -SoftwareConfig = _reflection.GeneratedProtocolMessageType( - "SoftwareConfig", - (_message.Message,), - dict( - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_SOFTWARECONFIG_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntry) - ), - ), - DESCRIPTOR=_SOFTWARECONFIG, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Specifies the selection and config of software inside the - cluster. - - - Attributes: - image_version: - Optional. The version of software inside the cluster. It must - be one of the supported `Dataproc Versions - `__, such as "1.2" - (including a subminor version, such as "1.2.29"), or the - `"preview" version - `__. If unspecified, it defaults to - the latest Debian version. - properties: - Optional. The properties to set on daemon config files. - Property keys are specified in ``prefix:property`` format, for - example ``core:hadoop.tmp.dir``. The following are supported - prefixes and their mappings: - capacity-scheduler: - ``capacity-scheduler.xml`` - core: ``core-site.xml`` - - distcp: ``distcp-default.xml`` - hdfs: ``hdfs-site.xml`` - - hive: ``hive-site.xml`` - mapred: ``mapred-site.xml`` - pig: - ``pig.properties`` - spark: ``spark-defaults.conf`` - yarn: - ``yarn-site.xml`` For more information, see `Cluster - properties `__. - optional_components: - Optional. The set of components to activate on the cluster. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SoftwareConfig) - ), -) -_sym_db.RegisterMessage(SoftwareConfig) -_sym_db.RegisterMessage(SoftwareConfig.PropertiesEntry) - -ClusterMetrics = _reflection.GeneratedProtocolMessageType( - "ClusterMetrics", - (_message.Message,), - dict( - HdfsMetricsEntry=_reflection.GeneratedProtocolMessageType( - "HdfsMetricsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTERMETRICS_HDFSMETRICSENTRY, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry) - ), - ), - YarnMetricsEntry=_reflection.GeneratedProtocolMessageType( - "YarnMetricsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTERMETRICS_YARNMETRICSENTRY, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry) - ), - ), - DESCRIPTOR=_CLUSTERMETRICS, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Contains cluster daemon metrics, such as HDFS and YARN - stats. - - **Beta Feature**: This report is available for testing purposes only. It - may be changed before final release. - - - Attributes: - hdfs_metrics: - The HDFS metrics. - yarn_metrics: - The YARN metrics. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterMetrics) - ), -) -_sym_db.RegisterMessage(ClusterMetrics) -_sym_db.RegisterMessage(ClusterMetrics.HdfsMetricsEntry) -_sym_db.RegisterMessage(ClusterMetrics.YarnMetricsEntry) - -CreateClusterRequest = _reflection.GeneratedProtocolMessageType( - "CreateClusterRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATECLUSTERREQUEST, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""A request to create a cluster. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - cluster belongs to. - region: - Required. The Dataproc region in which to handle the request. - cluster: - Required. The cluster to create. - request_id: - Optional. A unique id used to identify the request. If the - server receives two [CreateClusterRequest][google.cloud.datapr - oc.v1.CreateClusterRequest] requests with the same id, then - the second request will be ignored and the first - [google.longrunning.Operation][google.longrunning.Operation] - created and stored in the backend is returned. It is - recommended to always set this value to a `UUID `__. The id - must contain only letters (a-z, A-Z), numbers (0-9), - underscores (\_), and hyphens (-). The maximum length is 40 - characters. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.CreateClusterRequest) - ), -) -_sym_db.RegisterMessage(CreateClusterRequest) - -UpdateClusterRequest = _reflection.GeneratedProtocolMessageType( - "UpdateClusterRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATECLUSTERREQUEST, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""A request to update a cluster. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project the - cluster belongs to. - region: - Required. The Dataproc region in which to handle the request. - cluster_name: - Required. The cluster name. - cluster: - Required. The changes to the cluster. - graceful_decommission_timeout: - Optional. Timeout for graceful YARN decomissioning. Graceful - decommissioning allows removing nodes from the cluster without - interrupting jobs in progress. Timeout specifies how long to - wait for jobs in progress to finish before forcefully removing - nodes (and potentially interrupting jobs). Default timeout is - 0 (for forceful decommission), and the maximum allowed timeout - is 1 day. Only supported on Dataproc image versions 1.2 and - higher. - update_mask: - Required. Specifies the path, relative to ``Cluster``, of the - field to update. For example, to change the number of workers - in a cluster to 5, the ``update_mask`` parameter would be - specified as ``config.worker_config.num_instances``, and the - ``PATCH`` request body would specify the new value, as - follows: :: { "config":{ "workerConfig":{ - "numInstances":"5" } } } Similarly, to - change the number of preemptible workers in a cluster to 5, - the ``update_mask`` parameter would be - ``config.secondary_worker_config.num_instances``, and the - ``PATCH`` request body would be set as follows: :: { - "config":{ "secondaryWorkerConfig":{ - "numInstances":"5" } } } Note: Currently, - only the following fields can be updated: .. raw:: html - .. raw:: html .. raw:: html .. - raw:: html .. raw:: - html .. raw:: html - .. raw:: html .. raw:: html .. raw:: html .. raw:: html .. raw:: - html .. raw:: html - .. raw:: html .. raw:: html .. raw:: html - .. raw:: html .. raw:: html .. raw:: html .. raw:: - html .. 
- Mask                                                Purpose
- labels                                              Update labels
- config.worker\_config.num\_instances                Resize primary worker group
- config.secondary\_worker\_config.num\_instances     Resize secondary worker group
- config.autoscaling\_config.policy\_uri              Use, stop using, or change autoscaling policies
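The masks above map directly onto the generated ``ClusterControllerClient.update_cluster`` call, whose flattened signature (``project_id, region, cluster_name, cluster, update_mask``) matches the method signature declared on the ``UpdateCluster`` RPC later in this file. A minimal, hypothetical sketch, assuming default application credentials and placeholder project, region, and cluster names; only the fields named in the mask are applied:

from google.cloud import dataproc_v1

client = dataproc_v1.ClusterControllerClient()

# Resize the primary worker group to five instances; fields outside the
# mask are left untouched.
cluster_patch = {"config": {"worker_config": {"num_instances": 5}}}
update_mask = {"paths": ["config.worker_config.num_instances"]}

operation = client.update_cluster(
    "my-project",      # project_id (placeholder)
    "us-central1",     # region (placeholder)
    "my-cluster",      # cluster_name (placeholder)
    cluster_patch,
    update_mask,
)
operation.result()  # block until the long-running operation completes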
- request_id: - Optional. A unique id used to identify the request. If the - server receives two [UpdateClusterRequest][google.cloud.datapr - oc.v1.UpdateClusterRequest] requests with the same id, then - the second request will be ignored and the first - [google.longrunning.Operation][google.longrunning.Operation] - created and stored in the backend is returned. It is - recommended to always set this value to a `UUID `__. The id - must contain only letters (a-z, A-Z), numbers (0-9), - underscores (\_), and hyphens (-). The maximum length is 40 - characters. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.UpdateClusterRequest) - ), -) -_sym_db.RegisterMessage(UpdateClusterRequest) - -DeleteClusterRequest = _reflection.GeneratedProtocolMessageType( - "DeleteClusterRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETECLUSTERREQUEST, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""A request to delete a cluster. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - cluster belongs to. - region: - Required. The Dataproc region in which to handle the request. - cluster_name: - Required. The cluster name. - cluster_uuid: - Optional. Specifying the ``cluster_uuid`` means the RPC should - fail (with error NOT\_FOUND) if cluster with specified UUID - does not exist. - request_id: - Optional. A unique id used to identify the request. If the - server receives two [DeleteClusterRequest][google.cloud.datapr - oc.v1.DeleteClusterRequest] requests with the same id, then - the second request will be ignored and the first - [google.longrunning.Operation][google.longrunning.Operation] - created and stored in the backend is returned. It is - recommended to always set this value to a `UUID `__. The id - must contain only letters (a-z, A-Z), numbers (0-9), - underscores (\_), and hyphens (-). The maximum length is 40 - characters. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.DeleteClusterRequest) - ), -) -_sym_db.RegisterMessage(DeleteClusterRequest) - -GetClusterRequest = _reflection.GeneratedProtocolMessageType( - "GetClusterRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETCLUSTERREQUEST, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Request to get the resource representation for a cluster - in a project. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - cluster belongs to. - region: - Required. The Dataproc region in which to handle the request. - cluster_name: - Required. The cluster name. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.GetClusterRequest) - ), -) -_sym_db.RegisterMessage(GetClusterRequest) - -ListClustersRequest = _reflection.GeneratedProtocolMessageType( - "ListClustersRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTCLUSTERSREQUEST, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""A request to list the clusters in a project. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - cluster belongs to. - region: - Required. The Dataproc region in which to handle the request. - filter: - Optional. A filter constraining the clusters to list. Filters - are case-sensitive and have the following syntax: field = - value [AND [field = value]] ... where **field** is one of - ``status.state``, ``clusterName``, or ``labels.[KEY]``, and - ``[KEY]`` is a label key. 
**value** can be ``*`` to match all - values. ``status.state`` can be one of the following: - ``ACTIVE``, ``INACTIVE``, ``CREATING``, ``RUNNING``, - ``ERROR``, ``DELETING``, or ``UPDATING``. ``ACTIVE`` contains - the ``CREATING``, ``UPDATING``, and ``RUNNING`` states. - ``INACTIVE`` contains the ``DELETING`` and ``ERROR`` states. - ``clusterName`` is the name of the cluster provided at - creation time. Only the logical ``AND`` operator is supported; - space-separated items are treated as having an implicit - ``AND`` operator. Example filter: status.state = ACTIVE AND - clusterName = mycluster AND labels.env = staging AND - labels.starred = \* - page_size: - Optional. The standard List page size. - page_token: - Optional. The standard List page token. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ListClustersRequest) - ), -) -_sym_db.RegisterMessage(ListClustersRequest) - -ListClustersResponse = _reflection.GeneratedProtocolMessageType( - "ListClustersResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTCLUSTERSRESPONSE, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""The list of all clusters in a project. - - - Attributes: - clusters: - Output only. The clusters in the project. - next_page_token: - Output only. This token is included in the response if there - are more results to fetch. To fetch additional results, - provide this value as the ``page_token`` in a subsequent - ``ListClustersRequest``. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ListClustersResponse) - ), -) -_sym_db.RegisterMessage(ListClustersResponse) - -DiagnoseClusterRequest = _reflection.GeneratedProtocolMessageType( - "DiagnoseClusterRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DIAGNOSECLUSTERREQUEST, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""A request to collect cluster diagnostic information. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - cluster belongs to. - region: - Required. The Dataproc region in which to handle the request. - cluster_name: - Required. The cluster name. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.DiagnoseClusterRequest) - ), -) -_sym_db.RegisterMessage(DiagnoseClusterRequest) - -DiagnoseClusterResults = _reflection.GeneratedProtocolMessageType( - "DiagnoseClusterResults", - (_message.Message,), - dict( - DESCRIPTOR=_DIAGNOSECLUSTERRESULTS, - __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""The location of diagnostic output. - - - Attributes: - output_uri: - Output only. The Cloud Storage URI of the diagnostic output. - The output report is a plain text file with a summary of - collected diagnostics. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.DiagnoseClusterResults) - ), -) -_sym_db.RegisterMessage(DiagnoseClusterResults) - - -DESCRIPTOR._options = None -_CLUSTER_LABELSENTRY._options = None -_CLUSTER.fields_by_name["project_id"]._options = None -_CLUSTER.fields_by_name["cluster_name"]._options = None -_CLUSTER.fields_by_name["config"]._options = None -_CLUSTER.fields_by_name["labels"]._options = None -_CLUSTER.fields_by_name["status"]._options = None -_CLUSTER.fields_by_name["status_history"]._options = None -_CLUSTER.fields_by_name["cluster_uuid"]._options = None -_CLUSTERCONFIG.fields_by_name["config_bucket"]._options = None -_CLUSTERCONFIG.fields_by_name["gce_cluster_config"]._options = None -_CLUSTERCONFIG.fields_by_name["master_config"]._options = None -_CLUSTERCONFIG.fields_by_name["worker_config"]._options = None -_CLUSTERCONFIG.fields_by_name["secondary_worker_config"]._options = None -_CLUSTERCONFIG.fields_by_name["software_config"]._options = None -_CLUSTERCONFIG.fields_by_name["initialization_actions"]._options = None -_CLUSTERCONFIG.fields_by_name["encryption_config"]._options = None -_CLUSTERCONFIG.fields_by_name["autoscaling_config"]._options = None -_CLUSTERCONFIG.fields_by_name["security_config"]._options = None -_AUTOSCALINGCONFIG.fields_by_name["policy_uri"]._options = None -_ENCRYPTIONCONFIG.fields_by_name["gce_pd_kms_key_name"]._options = None -_GCECLUSTERCONFIG_METADATAENTRY._options = None -_GCECLUSTERCONFIG.fields_by_name["zone_uri"]._options = None -_GCECLUSTERCONFIG.fields_by_name["network_uri"]._options = None -_GCECLUSTERCONFIG.fields_by_name["subnetwork_uri"]._options = None -_GCECLUSTERCONFIG.fields_by_name["internal_ip_only"]._options = None -_GCECLUSTERCONFIG.fields_by_name["service_account"]._options = None -_GCECLUSTERCONFIG.fields_by_name["service_account_scopes"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["num_instances"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["instance_names"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["image_uri"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["machine_type_uri"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["disk_config"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["is_preemptible"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["managed_group_config"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["accelerators"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["min_cpu_platform"]._options = None -_MANAGEDGROUPCONFIG.fields_by_name["instance_template_name"]._options = None -_MANAGEDGROUPCONFIG.fields_by_name["instance_group_manager_name"]._options = None -_DISKCONFIG.fields_by_name["boot_disk_type"]._options = None -_DISKCONFIG.fields_by_name["boot_disk_size_gb"]._options = None -_DISKCONFIG.fields_by_name["num_local_ssds"]._options = None -_NODEINITIALIZATIONACTION.fields_by_name["executable_file"]._options = None -_NODEINITIALIZATIONACTION.fields_by_name["execution_timeout"]._options = None -_CLUSTERSTATUS.fields_by_name["state"]._options = None -_CLUSTERSTATUS.fields_by_name["detail"]._options = None -_CLUSTERSTATUS.fields_by_name["state_start_time"]._options = None -_CLUSTERSTATUS.fields_by_name["substate"]._options = None -_KERBEROSCONFIG.fields_by_name["enable_kerberos"]._options = None -_KERBEROSCONFIG.fields_by_name["root_principal_password_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["kms_key_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["keystore_uri"]._options = None 
-_KERBEROSCONFIG.fields_by_name["truststore_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["keystore_password_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["key_password_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["truststore_password_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["cross_realm_trust_realm"]._options = None -_KERBEROSCONFIG.fields_by_name["cross_realm_trust_kdc"]._options = None -_KERBEROSCONFIG.fields_by_name["cross_realm_trust_admin_server"]._options = None -_KERBEROSCONFIG.fields_by_name["cross_realm_trust_shared_password_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["kdc_db_key_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["tgt_lifetime_hours"]._options = None -_KERBEROSCONFIG.fields_by_name["realm"]._options = None -_SOFTWARECONFIG_PROPERTIESENTRY._options = None -_SOFTWARECONFIG.fields_by_name["image_version"]._options = None -_SOFTWARECONFIG.fields_by_name["properties"]._options = None -_SOFTWARECONFIG.fields_by_name["optional_components"]._options = None -_CLUSTERMETRICS_HDFSMETRICSENTRY._options = None -_CLUSTERMETRICS_YARNMETRICSENTRY._options = None -_CREATECLUSTERREQUEST.fields_by_name["project_id"]._options = None -_CREATECLUSTERREQUEST.fields_by_name["region"]._options = None -_CREATECLUSTERREQUEST.fields_by_name["cluster"]._options = None -_CREATECLUSTERREQUEST.fields_by_name["request_id"]._options = None -_UPDATECLUSTERREQUEST.fields_by_name["project_id"]._options = None -_UPDATECLUSTERREQUEST.fields_by_name["region"]._options = None -_UPDATECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None -_UPDATECLUSTERREQUEST.fields_by_name["cluster"]._options = None -_UPDATECLUSTERREQUEST.fields_by_name["graceful_decommission_timeout"]._options = None -_UPDATECLUSTERREQUEST.fields_by_name["update_mask"]._options = None -_UPDATECLUSTERREQUEST.fields_by_name["request_id"]._options = None -_DELETECLUSTERREQUEST.fields_by_name["project_id"]._options = None -_DELETECLUSTERREQUEST.fields_by_name["region"]._options = None -_DELETECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None -_DELETECLUSTERREQUEST.fields_by_name["cluster_uuid"]._options = None -_DELETECLUSTERREQUEST.fields_by_name["request_id"]._options = None -_GETCLUSTERREQUEST.fields_by_name["project_id"]._options = None -_GETCLUSTERREQUEST.fields_by_name["region"]._options = None -_GETCLUSTERREQUEST.fields_by_name["cluster_name"]._options = None -_LISTCLUSTERSREQUEST.fields_by_name["project_id"]._options = None -_LISTCLUSTERSREQUEST.fields_by_name["region"]._options = None -_LISTCLUSTERSREQUEST.fields_by_name["filter"]._options = None -_LISTCLUSTERSREQUEST.fields_by_name["page_size"]._options = None -_LISTCLUSTERSREQUEST.fields_by_name["page_token"]._options = None -_LISTCLUSTERSRESPONSE.fields_by_name["clusters"]._options = None -_LISTCLUSTERSRESPONSE.fields_by_name["next_page_token"]._options = None -_DIAGNOSECLUSTERREQUEST.fields_by_name["project_id"]._options = None -_DIAGNOSECLUSTERREQUEST.fields_by_name["region"]._options = None -_DIAGNOSECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None -_DIAGNOSECLUSTERRESULTS.fields_by_name["output_uri"]._options = None - -_CLUSTERCONTROLLER = _descriptor.ServiceDescriptor( - name="ClusterController", - full_name="google.cloud.dataproc.v1.ClusterController", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" - ), - serialized_start=5495, - serialized_end=7130, - methods=[ - _descriptor.MethodDescriptor( 
- name="CreateCluster", - full_name="google.cloud.dataproc.v1.ClusterController.CreateCluster", - index=0, - containing_service=None, - input_type=_CREATECLUSTERREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - '\202\323\344\223\002>"3/v1/projects/{project_id}/regions/{region}/clusters:\007cluster\332A\031project_id,region,cluster\312A<\n\007Cluster\0221google.cloud.dataproc.v1.ClusterOperationMetadata' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateCluster", - full_name="google.cloud.dataproc.v1.ClusterController.UpdateCluster", - index=1, - containing_service=None, - input_type=_UPDATECLUSTERREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - "\202\323\344\223\002M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\007cluster\312A<\n\007Cluster\0221google.cloud.dataproc.v1.ClusterOperationMetadata\332A2project_id,region,cluster_name,cluster,update_mask" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteCluster", - full_name="google.cloud.dataproc.v1.ClusterController.DeleteCluster", - index=2, - containing_service=None, - input_type=_DELETECLUSTERREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - "\202\323\344\223\002D*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\332A\036project_id,region,cluster_name\312AJ\n\025google.protobuf.Empty\0221google.cloud.dataproc.v1.ClusterOperationMetadata" - ), - ), - _descriptor.MethodDescriptor( - name="GetCluster", - full_name="google.cloud.dataproc.v1.ClusterController.GetCluster", - index=3, - containing_service=None, - input_type=_GETCLUSTERREQUEST, - output_type=_CLUSTER, - serialized_options=_b( - "\202\323\344\223\002D\022B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\332A\036project_id,region,cluster_name" - ), - ), - _descriptor.MethodDescriptor( - name="ListClusters", - full_name="google.cloud.dataproc.v1.ClusterController.ListClusters", - index=4, - containing_service=None, - input_type=_LISTCLUSTERSREQUEST, - output_type=_LISTCLUSTERSRESPONSE, - serialized_options=_b( - "\202\323\344\223\0025\0223/v1/projects/{project_id}/regions/{region}/clusters\332A\021project_id,region\332A\030project_id,region,filter" - ), - ), - _descriptor.MethodDescriptor( - name="DiagnoseCluster", - full_name="google.cloud.dataproc.v1.ClusterController.DiagnoseCluster", - index=5, - containing_service=None, - input_type=_DIAGNOSECLUSTERREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - '\202\323\344\223\002P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\001*\332A\036project_id,region,cluster_name\312A/\n\025google.protobuf.Empty\022\026DiagnoseClusterResults' - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_CLUSTERCONTROLLER) - -DESCRIPTOR.services_by_name["ClusterController"] = _CLUSTERCONTROLLER - -# @@protoc_insertion_point(module_scope) diff --git a/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py deleted file mode 100644 index def69f148416..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py +++ /dev/null @@ -1,151 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-import grpc - -from google.cloud.dataproc_v1.proto import ( - clusters_pb2 as google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) - - -class ClusterControllerStub(object): - """The ClusterControllerService provides methods to manage clusters - of Compute Engine instances. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.CreateCluster = channel.unary_unary( - "/google.cloud.dataproc.v1.ClusterController/CreateCluster", - request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.CreateClusterRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.UpdateCluster = channel.unary_unary( - "/google.cloud.dataproc.v1.ClusterController/UpdateCluster", - request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.UpdateClusterRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.DeleteCluster = channel.unary_unary( - "/google.cloud.dataproc.v1.ClusterController/DeleteCluster", - request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.DeleteClusterRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.GetCluster = channel.unary_unary( - "/google.cloud.dataproc.v1.ClusterController/GetCluster", - request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.GetClusterRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.Cluster.FromString, - ) - self.ListClusters = channel.unary_unary( - "/google.cloud.dataproc.v1.ClusterController/ListClusters", - request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.ListClustersRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.ListClustersResponse.FromString, - ) - self.DiagnoseCluster = channel.unary_unary( - "/google.cloud.dataproc.v1.ClusterController/DiagnoseCluster", - request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.DiagnoseClusterRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - - -class ClusterControllerServicer(object): - """The ClusterControllerService provides methods to manage clusters - of Compute Engine instances. - """ - - def CreateCluster(self, request, context): - """Creates a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateCluster(self, request, context): - """Updates a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteCluster(self, request, context): - """Deletes a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetCluster(self, request, context): - """Gets the resource representation for a cluster in a project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListClusters(self, request, context): - """Lists all regions/{region}/clusters in a project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DiagnoseCluster(self, request, context): - """Gets cluster diagnostic information. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). - After the operation completes, - [Operation.response][google.longrunning.Operation.response] - contains - [DiagnoseClusterResults](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_ClusterControllerServicer_to_server(servicer, server): - rpc_method_handlers = { - "CreateCluster": grpc.unary_unary_rpc_method_handler( - servicer.CreateCluster, - request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.CreateClusterRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "UpdateCluster": grpc.unary_unary_rpc_method_handler( - servicer.UpdateCluster, - request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.UpdateClusterRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "DeleteCluster": grpc.unary_unary_rpc_method_handler( - servicer.DeleteCluster, - request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.DeleteClusterRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "GetCluster": grpc.unary_unary_rpc_method_handler( - servicer.GetCluster, - request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.GetClusterRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.Cluster.SerializeToString, - ), - "ListClusters": grpc.unary_unary_rpc_method_handler( - servicer.ListClusters, - request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.ListClustersRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.ListClustersResponse.SerializeToString, - ), - "DiagnoseCluster": grpc.unary_unary_rpc_method_handler( - servicer.DiagnoseCluster, - 
request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.DiagnoseClusterRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.dataproc.v1.ClusterController", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/dataproc/google/cloud/dataproc_v1/proto/jobs.proto b/dataproc/google/cloud/dataproc_v1/proto/jobs.proto deleted file mode 100644 index bcb68fed0bb4..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/jobs.proto +++ /dev/null @@ -1,792 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.dataproc.v1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "JobsProto"; -option java_package = "com.google.cloud.dataproc.v1"; - -// The JobController provides methods to manage jobs. -service JobController { - option (google.api.default_host) = "dataproc.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Submits a job to a cluster. - rpc SubmitJob(SubmitJobRequest) returns (Job) { - option (google.api.http) = { - post: "/v1/projects/{project_id}/regions/{region}/jobs:submit" - body: "*" - }; - option (google.api.method_signature) = "project_id,region,job"; - } - - // Gets the resource representation for a job in a project. - rpc GetJob(GetJobRequest) returns (Job) { - option (google.api.http) = { - get: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" - }; - option (google.api.method_signature) = "project_id,region,job_id"; - } - - // Lists regions/{region}/jobs in a project. - rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) { - option (google.api.http) = { - get: "/v1/projects/{project_id}/regions/{region}/jobs" - }; - option (google.api.method_signature) = "project_id,region"; - option (google.api.method_signature) = "project_id,region,filter"; - } - - // Updates a job in a project. - rpc UpdateJob(UpdateJobRequest) returns (Job) { - option (google.api.http) = { - patch: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" - body: "job" - }; - } - - // Starts a job cancellation request. To access the job resource - // after cancellation, call - // [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) - // or - // [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1/projects.regions.jobs/get). 
- rpc CancelJob(CancelJobRequest) returns (Job) { - option (google.api.http) = { - post: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel" - body: "*" - }; - option (google.api.method_signature) = "project_id,region,job_id"; - } - - // Deletes the job from the project. If the job is active, the delete fails, - // and the response returns `FAILED_PRECONDITION`. - rpc DeleteJob(DeleteJobRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" - }; - option (google.api.method_signature) = "project_id,region,job_id"; - } -} - -// The runtime logging config of the job. -message LoggingConfig { - // The Log4j level for job execution. When running an - // [Apache Hive](http://hive.apache.org/) job, Cloud - // Dataproc configures the Hive client to an equivalent verbosity level. - enum Level { - // Level is unspecified. Use default level for log4j. - LEVEL_UNSPECIFIED = 0; - - // Use ALL level for log4j. - ALL = 1; - - // Use TRACE level for log4j. - TRACE = 2; - - // Use DEBUG level for log4j. - DEBUG = 3; - - // Use INFO level for log4j. - INFO = 4; - - // Use WARN level for log4j. - WARN = 5; - - // Use ERROR level for log4j. - ERROR = 6; - - // Use FATAL level for log4j. - FATAL = 7; - - // Turn off log4j. - OFF = 8; - } - - // The per-package log levels for the driver. This may include - // "root" package name to configure rootLogger. - // Examples: - // 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' - map driver_log_levels = 2; -} - -// A Dataproc job for running -// [Apache Hadoop -// MapReduce](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html) -// jobs on [Apache Hadoop -// YARN](https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html). -message HadoopJob { - // Required. Indicates the location of the driver's main class. Specify - // either the jar file that contains the main class or the main class name. - // To specify both, add the jar file to `jar_file_uris`, and then specify - // the main class name in this property. - oneof driver { - // The HCFS URI of the jar file containing the main class. - // Examples: - // 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' - // 'hdfs:/tmp/test-samples/custom-wordcount.jar' - // 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar' - string main_jar_file_uri = 1; - - // The name of the driver's main class. The jar file containing the class - // must be in the default CLASSPATH or specified in `jar_file_uris`. - string main_class = 2; - } - - // Optional. The arguments to pass to the driver. Do not - // include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as - // job properties, since a collision may occur that causes an incorrect job - // submission. - repeated string args = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Jar file URIs to add to the CLASSPATHs of the - // Hadoop driver and tasks. - repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied - // to the working directory of Hadoop drivers and distributed tasks. Useful - // for naively parallel tasks. - repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of archives to be extracted in the working directory of - // Hadoop drivers and tasks. 
Supported file types: - // .jar, .tar, .tar.gz, .tgz, or .zip. - repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names to values, used to configure Hadoop. - // Properties that conflict with values set by the Dataproc API may be - // overwritten. Can include properties set in /etc/hadoop/conf/*-site and - // classes in user code. - map properties = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; -} - -// A Dataproc job for running [Apache Spark](http://spark.apache.org/) -// applications on YARN. -message SparkJob { - // Required. The specification of the main method to call to drive the job. - // Specify either the jar file that contains the main class or the main class - // name. To pass both a main jar and a main class in that jar, add the jar to - // `CommonJob.jar_file_uris`, and then specify the main class name in - // `main_class`. - oneof driver { - // The HCFS URI of the jar file that contains the main class. - string main_jar_file_uri = 1; - - // The name of the driver's main class. The jar file that contains the class - // must be in the default CLASSPATH or specified in `jar_file_uris`. - string main_class = 2; - } - - // Optional. The arguments to pass to the driver. Do not include arguments, - // such as `--conf`, that can be set as job properties, since a collision may - // occur that causes an incorrect job submission. - repeated string args = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the - // Spark driver and tasks. - repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of files to be copied to the working directory of - // Spark drivers and distributed tasks. Useful for naively parallel tasks. - repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of archives to be extracted in the working directory - // of Spark drivers and tasks. Supported file types: - // .jar, .tar, .tar.gz, .tgz, and .zip. - repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names to values, used to configure Spark. - // Properties that conflict with values set by the Dataproc API may be - // overwritten. Can include properties set in - // /etc/spark/conf/spark-defaults.conf and classes in user code. - map properties = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; -} - -// A Dataproc job for running -// [Apache -// PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html) -// applications on YARN. -message PySparkJob { - // Required. The HCFS URI of the main Python file to use as the driver. Must - // be a .py file. - string main_python_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The arguments to pass to the driver. Do not include arguments, - // such as `--conf`, that can be set as job properties, since a collision may - // occur that causes an incorrect job submission. - repeated string args = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS file URIs of Python files to pass to the PySpark - // framework. Supported file types: .py, .egg, and .zip. 
- repeated string python_file_uris = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the - // Python driver and tasks. - repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of files to be copied to the working directory of - // Python drivers and distributed tasks. Useful for naively parallel tasks. - repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of archives to be extracted in the working directory of - // .jar, .tar, .tar.gz, .tgz, and .zip. - repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names to values, used to configure PySpark. - // Properties that conflict with values set by the Dataproc API may be - // overwritten. Can include properties set in - // /etc/spark/conf/spark-defaults.conf and classes in user code. - map properties = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; -} - -// A list of queries to run on a cluster. -message QueryList { - // Required. The queries to execute. You do not need to terminate a query - // with a semicolon. Multiple queries can be specified in one string - // by separating each with a semicolon. Here is an example of an Cloud - // Dataproc API snippet that uses a QueryList to specify a HiveJob: - // - // "hiveJob": { - // "queryList": { - // "queries": [ - // "query1", - // "query2", - // "query3;query4", - // ] - // } - // } - repeated string queries = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// A Dataproc job for running [Apache Hive](https://hive.apache.org/) -// queries on YARN. -message HiveJob { - // Required. The sequence of Hive queries to execute, specified as either - // an HCFS file URI or a list of queries. - oneof queries { - // The HCFS URI of the script that contains Hive queries. - string query_file_uri = 1; - - // A list of queries. - QueryList query_list = 2; - } - - // Optional. Whether to continue executing queries if a query fails. - // The default value is `false`. Setting to `true` can be useful when - // executing independent parallel queries. - bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Mapping of query variable names to values (equivalent to the - // Hive command: `SET name="value";`). - map script_variables = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names and values, used to configure Hive. - // Properties that conflict with values set by the Dataproc API may be - // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, - // /etc/hive/conf/hive-site.xml, and classes in user code. - map properties = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of jar files to add to the CLASSPATH of the - // Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes - // and UDFs. - repeated string jar_file_uris = 6 [(google.api.field_behavior) = OPTIONAL]; -} - -// A Dataproc job for running [Apache Spark -// SQL](http://spark.apache.org/sql/) queries. -message SparkSqlJob { - // Required. The sequence of Spark SQL queries to execute, specified as - // either an HCFS file URI or as a list of queries. - oneof queries { - // The HCFS URI of the script that contains SQL queries. - string query_file_uri = 1; - - // A list of queries. 
- QueryList query_list = 2; - } - - // Optional. Mapping of query variable names to values (equivalent to the - // Spark SQL command: SET `name="value";`). - map script_variables = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names to values, used to configure - // Spark SQL's SparkConf. Properties that conflict with values set by the - // Dataproc API may be overwritten. - map properties = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. - repeated string jar_file_uris = 56 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 6 [(google.api.field_behavior) = OPTIONAL]; -} - -// A Dataproc job for running [Apache Pig](https://pig.apache.org/) -// queries on YARN. -message PigJob { - // Required. The sequence of Pig queries to execute, specified as an HCFS - // file URI or a list of queries. - oneof queries { - // The HCFS URI of the script that contains the Pig queries. - string query_file_uri = 1; - - // A list of queries. - QueryList query_list = 2; - } - - // Optional. Whether to continue executing queries if a query fails. - // The default value is `false`. Setting to `true` can be useful when - // executing independent parallel queries. - bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Mapping of query variable names to values (equivalent to the Pig - // command: `name=[value]`). - map script_variables = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names to values, used to configure Pig. - // Properties that conflict with values set by the Dataproc API may be - // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, - // /etc/pig/conf/pig.properties, and classes in user code. - map properties = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of jar files to add to the CLASSPATH of - // the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. - repeated string jar_file_uris = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 7 [(google.api.field_behavior) = OPTIONAL]; -} - -// Dataproc job config. -message JobPlacement { - // Required. The name of the cluster where the job will be submitted. - string cluster_name = 1 [(google.api.field_behavior) = REQUIRED]; - - // Output only. A cluster UUID generated by the Dataproc service when - // the job is submitted. - string cluster_uuid = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Dataproc job status. -message JobStatus { - // The job state. - enum State { - // The job state is unknown. - STATE_UNSPECIFIED = 0; - - // The job is pending; it has been submitted, but is not yet running. - PENDING = 1; - - // Job has been received by the service and completed initial setup; - // it will soon be submitted to the cluster. - SETUP_DONE = 8; - - // The job is running on the cluster. - RUNNING = 2; - - // A CancelJob request has been received, but is pending. - CANCEL_PENDING = 3; - - // Transient in-flight resources have been canceled, and the request to - // cancel the running job has been issued to the cluster. - CANCEL_STARTED = 7; - - // The job cancellation was successful. - CANCELLED = 4; - - // The job has completed successfully. - DONE = 5; - - // The job has completed, but encountered an error. 
- ERROR = 6; - - // Job attempt has failed. The detail field contains failure details for - // this attempt. - // - // Applies to restartable jobs only. - ATTEMPT_FAILURE = 9; - } - - // The job substate. - enum Substate { - // The job substate is unknown. - UNSPECIFIED = 0; - - // The Job is submitted to the agent. - // - // Applies to RUNNING state. - SUBMITTED = 1; - - // The Job has been received and is awaiting execution (it may be waiting - // for a condition to be met). See the "details" field for the reason for - // the delay. - // - // Applies to RUNNING state. - QUEUED = 2; - - // The agent-reported status is out of date, which may be caused by a - // loss of communication between the agent and Dataproc. If the - // agent does not send a timely update, the job will fail. - // - // Applies to RUNNING state. - STALE_STATUS = 3; - } - - // Output only. A state message specifying the overall job state. - State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. Output only. Job state details, such as an error - // description if the state is ERROR. - string details = 2 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = OPTIONAL - ]; - - // Output only. The time when this state was entered. - google.protobuf.Timestamp state_start_time = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Additional state information, which includes - // status reported by the agent. - Substate substate = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Encapsulates the full scoping used to reference a job. -message JobReference { - // Required. The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The job ID, which must be unique within the project. - // - // The ID must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), or hyphens (-). The maximum length is 100 characters. - // - // If not specified by the caller, the job ID will be provided by the server. - string job_id = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A YARN application created by a job. Application information is a subset of -// org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto. -// -// **Beta Feature**: This report is available for testing purposes only. It may -// be changed before final release. -message YarnApplication { - // The application state, corresponding to - // YarnProtos.YarnApplicationStateProto. - enum State { - // Status is unspecified. - STATE_UNSPECIFIED = 0; - - // Status is NEW. - NEW = 1; - - // Status is NEW_SAVING. - NEW_SAVING = 2; - - // Status is SUBMITTED. - SUBMITTED = 3; - - // Status is ACCEPTED. - ACCEPTED = 4; - - // Status is RUNNING. - RUNNING = 5; - - // Status is FINISHED. - FINISHED = 6; - - // Status is FAILED. - FAILED = 7; - - // Status is KILLED. - KILLED = 8; - } - - // Required. The application name. - string name = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The application state. - State state = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The numerical progress of the application, from 1 to 100. - float progress = 3 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The HTTP URL of the ApplicationMaster, HistoryServer, or - // TimelineServer that provides application-specific information. The URL uses - // the internal hostname, and requires a proxy server for resolution and, - // possibly, access. 
- string tracking_url = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// A Dataproc job resource. -message Job { - // Optional. The fully qualified reference to the job, which can be used to - // obtain the equivalent REST path of the job resource. If this property - // is not specified when a job is created, the server generates a - // job_id. - JobReference reference = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Job information, including how, when, and where to - // run the job. - JobPlacement placement = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The application/framework-specific portion of the job. - oneof type_job { - // Job is a Hadoop job. - HadoopJob hadoop_job = 3; - - // Job is a Spark job. - SparkJob spark_job = 4; - - // Job is a Pyspark job. - PySparkJob pyspark_job = 5; - - // Job is a Hive job. - HiveJob hive_job = 6; - - // Job is a Pig job. - PigJob pig_job = 7; - - // Job is a SparkSql job. - SparkSqlJob spark_sql_job = 12; - } - - // Output only. The job status. Additional application-specific - // status information may be contained in the type_job - // and yarn_applications fields. - JobStatus status = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The previous job status. - repeated JobStatus status_history = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The collection of YARN applications spun up by this job. - // - // **Beta** Feature: This report is available for testing purposes only. It - // may be changed before final release. - repeated YarnApplication yarn_applications = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. A URI pointing to the location of the stdout of the job's - // driver program. - string driver_output_resource_uri = 17 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. If present, the location of miscellaneous control files - // which may be used as part of job setup and handling. If not present, - // control files may be placed in the same location as `driver_output_uri`. - string driver_control_files_uri = 15 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. The labels to associate with this job. - // Label **keys** must contain 1 to 63 characters, and must conform to - // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). - // Label **values** may be empty, but, if present, must contain 1 to 63 - // characters, and must conform to [RFC - // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be - // associated with a job. - map labels = 18 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job scheduling configuration. - JobScheduling scheduling = 20 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. A UUID that uniquely identifies a job within the project - // over time. This is in contrast to a user-settable reference.job_id that - // may be reused over time. - string job_uuid = 22 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Job scheduling options. -message JobScheduling { - // Optional. Maximum number of times per hour a driver may be restarted as - // a result of driver terminating with non-zero code before job is - // reported failed. - // - // A job may be reported as thrashing if driver exits with non-zero code - // 4 times within 10 minute window. - // - // Maximum value is 10. - int32 max_failures_per_hour = 1 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to submit a job. -message SubmitJobRequest { - // Required. 
The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The job resource. - Job job = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A unique id used to identify the request. If the server - // receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same - // id, then the second request will be ignored and the - // first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend - // is returned. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to get the resource representation for a job in a project. -message GetJobRequest { - // Required. The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The job ID. - string job_id = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to list jobs in a project. -message ListJobsRequest { - // A matcher that specifies categories of job states. - enum JobStateMatcher { - // Match all jobs, regardless of state. - ALL = 0; - - // Only match jobs in non-terminal states: PENDING, RUNNING, or - // CANCEL_PENDING. - ACTIVE = 1; - - // Only match jobs in terminal states: CANCELLED, DONE, or ERROR. - NON_ACTIVE = 2; - } - - // Required. The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 6 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The number of results to return in each response. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The page token, returned by a previous call, to request the - // next page of results. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If set, the returned jobs list includes only jobs that were - // submitted to the named cluster. - string cluster_name = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Specifies enumerated categories of jobs to list. - // (default = match ALL jobs). - // - // If `filter` is provided, `jobStateMatcher` will be ignored. - JobStateMatcher job_state_matcher = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A filter constraining the jobs to list. Filters are - // case-sensitive and have the following syntax: - // - // [field = value] AND [field [= value]] ... - // - // where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label - // key. **value** can be `*` to match all values. - // `status.state` can be either `ACTIVE` or `NON_ACTIVE`. - // Only the logical `AND` operator is supported; space-separated items are - // treated as having an implicit `AND` operator. 
- // - // Example filter: - // - // status.state = ACTIVE AND labels.env = staging AND labels.starred = * - string filter = 7 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to update a job. -message UpdateJobRequest { - // Required. The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The job ID. - string job_id = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The changes to the job. - Job job = 4 [(google.api.field_behavior) = REQUIRED]; - - // Required. Specifies the path, relative to Job, of - // the field to update. For example, to update the labels of a Job the - // update_mask parameter would be specified as - // labels, and the `PATCH` request body would specify the new - // value. Note: Currently, labels is the only - // field that can be updated. - google.protobuf.FieldMask update_mask = 5 [(google.api.field_behavior) = REQUIRED]; -} - -// A list of jobs in a project. -message ListJobsResponse { - // Output only. Jobs list. - repeated Job jobs = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. This token is included in the response if there are more results - // to fetch. To fetch additional results, provide this value as the - // `page_token` in a subsequent ListJobsRequest. - string next_page_token = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to cancel a job. -message CancelJobRequest { - // Required. The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The job ID. - string job_id = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to delete a job. -message DeleteJobRequest { - // Required. The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The job ID. - string job_id = 2 [(google.api.field_behavior) = REQUIRED]; -} diff --git a/dataproc/google/cloud/dataproc_v1/proto/jobs_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/jobs_pb2.py deleted file mode 100644 index 68b7392f7db9..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/jobs_pb2.py +++ /dev/null @@ -1,4335 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/dataproc_v1/proto/jobs.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/dataproc_v1/proto/jobs.proto", - package="google.cloud.dataproc.v1", - syntax="proto3", - serialized_options=_b( - "\n\034com.google.cloud.dataproc.v1B\tJobsProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" - ), - serialized_pb=_b( - '\n)google/cloud/dataproc_v1/proto/jobs.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc1\x02\n\rLoggingConfig\x12W\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32<.google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry\x1a\x65\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12<\n\x05value\x18\x02 \x01(\x0e\x32-.google.cloud.dataproc.v1.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xf1\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x11\n\x04\x61rgs\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12L\n\nproperties\x18\x07 \x03(\x0b\x32\x33.google.cloud.dataproc.v1.HadoopJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xef\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x11\n\x04\x61rgs\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12K\n\nproperties\x18\x07 \x03(\x0b\x32\x32.google.cloud.dataproc.v1.SparkJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xf8\x02\n\nPySparkJob\x12!\n\x14main_python_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04\x61rgs\x18\x02 \x03(\tB\x03\xe0\x41\x01\x12\x1d\n\x10python_file_uris\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12M\n\nproperties\x18\x07 \x03(\x0b\x32\x34.google.cloud.dataproc.v1.PySparkJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"!\n\tQueryList\x12\x14\n\x07queries\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xb5\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12 \n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12U\n\x10script_variables\x18\x04 \x03(\x0b\x32\x36.google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12J\n\nproperties\x18\x05 \x03(\x0b\x32\x31.google.cloud.dataproc.v1.HiveJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xe5\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12Y\n\x10script_variables\x18\x03 \x03(\x0b\x32:.google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12N\n\nproperties\x18\x04 \x03(\x0b\x32\x35.google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x38 \x03(\tB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xf8\x03\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12 \n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12T\n\x10script_variables\x18\x04 \x03(\x0b\x32\x35.google.cloud.dataproc.v1.PigJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12I\n\nproperties\x18\x05 \x03(\x0b\x32\x30.google.cloud.dataproc.v1.PigJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x07 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"D\n\x0cJobPlacement\x12\x19\n\x0c\x63luster_name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xd9\x03\n\tJobStatus\x12=\n\x05state\x18\x01 \x01(\x0e\x32).google.cloud.dataproc.v1.JobStatus.StateB\x03\xe0\x41\x03\x12\x17\n\x07\x64\x65tails\x18\x02 
\x01(\tB\x06\xe0\x41\x03\xe0\x41\x01\x12\x39\n\x10state_start_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x43\n\x08substate\x18\x07 \x01(\x0e\x32,.google.cloud.dataproc.v1.JobStatus.SubstateB\x03\xe0\x41\x03"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"<\n\x0cJobReference\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x01"\xa5\x02\n\x0fYarnApplication\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x43\n\x05state\x18\x02 \x01(\x0e\x32/.google.cloud.dataproc.v1.YarnApplication.StateB\x03\xe0\x41\x02\x12\x15\n\x08progress\x18\x03 \x01(\x02\x42\x03\xe0\x41\x02\x12\x19\n\x0ctracking_url\x18\x04 \x01(\tB\x03\xe0\x41\x01"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\xcd\x07\n\x03Job\x12>\n\treference\x18\x01 \x01(\x0b\x32&.google.cloud.dataproc.v1.JobReferenceB\x03\xe0\x41\x01\x12>\n\tplacement\x18\x02 \x01(\x0b\x32&.google.cloud.dataproc.v1.JobPlacementB\x03\xe0\x41\x02\x12\x39\n\nhadoop_job\x18\x03 \x01(\x0b\x32#.google.cloud.dataproc.v1.HadoopJobH\x00\x12\x37\n\tspark_job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1.SparkJobH\x00\x12;\n\x0bpyspark_job\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.PySparkJobH\x00\x12\x35\n\x08hive_job\x18\x06 \x01(\x0b\x32!.google.cloud.dataproc.v1.HiveJobH\x00\x12\x33\n\x07pig_job\x18\x07 \x01(\x0b\x32 .google.cloud.dataproc.v1.PigJobH\x00\x12>\n\rspark_sql_job\x18\x0c \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkSqlJobH\x00\x12\x38\n\x06status\x18\x08 \x01(\x0b\x32#.google.cloud.dataproc.v1.JobStatusB\x03\xe0\x41\x03\x12@\n\x0estatus_history\x18\r \x03(\x0b\x32#.google.cloud.dataproc.v1.JobStatusB\x03\xe0\x41\x03\x12I\n\x11yarn_applications\x18\t \x03(\x0b\x32).google.cloud.dataproc.v1.YarnApplicationB\x03\xe0\x41\x03\x12\'\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\tB\x03\xe0\x41\x03\x12%\n\x18\x64river_control_files_uri\x18\x0f \x01(\tB\x03\xe0\x41\x03\x12>\n\x06labels\x18\x12 \x03(\x0b\x32).google.cloud.dataproc.v1.Job.LabelsEntryB\x03\xe0\x41\x01\x12@\n\nscheduling\x18\x14 \x01(\x0b\x32\'.google.cloud.dataproc.v1.JobSchedulingB\x03\xe0\x41\x01\x12\x15\n\x08job_uuid\x18\x16 \x01(\tB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08type_job"3\n\rJobScheduling\x12"\n\x15max_failures_per_hour\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01"\x8a\x01\n\x10SubmitJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12/\n\x03job\x18\x02 \x01(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"R\n\rGetJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 
\x01(\tB\x03\xe0\x41\x02"\xb3\x02\n\x0fListJobsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x06 \x01(\tB\x03\xe0\x41\x02\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x63luster_name\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12Y\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32\x39.google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcherB\x03\xe0\x41\x01\x12\x13\n\x06\x66ilter\x18\x07 \x01(\tB\x03\xe0\x41\x01"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xbc\x01\n\x10UpdateJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12/\n\x03job\x18\x04 \x01(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"b\n\x10ListJobsResponse\x12\x30\n\x04jobs\x18\x01 \x03(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x01"U\n\x10\x43\x61ncelJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"U\n\x10\x44\x65leteJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x32\x9b\t\n\rJobController\x12\xb1\x01\n\tSubmitJob\x12*.google.cloud.dataproc.v1.SubmitJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"Y\x82\xd3\xe4\x93\x02;"6/v1/projects/{project_id}/regions/{region}/jobs:submit:\x01*\xda\x41\x15project_id,region,job\x12\xad\x01\n\x06GetJob\x12\'.google.cloud.dataproc.v1.GetJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"[\x82\xd3\xe4\x93\x02:\x12\x38/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x18project_id,region,job_id\x12\xc9\x01\n\x08ListJobs\x12).google.cloud.dataproc.v1.ListJobsRequest\x1a*.google.cloud.dataproc.v1.ListJobsResponse"f\x82\xd3\xe4\x93\x02\x31\x12//v1/projects/{project_id}/regions/{region}/jobs\xda\x41\x11project_id,region\xda\x41\x18project_id,region,filter\x12\x9d\x01\n\tUpdateJob\x12*.google.cloud.dataproc.v1.UpdateJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"E\x82\xd3\xe4\x93\x02?28/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xbd\x01\n\tCancelJob\x12*.google.cloud.dataproc.v1.CancelJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"e\x82\xd3\xe4\x93\x02\x44"?/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\xda\x41\x18project_id,region,job_id\x12\xac\x01\n\tDeleteJob\x12*.google.cloud.dataproc.v1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02:*8/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x18project_id,region,job_id\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBm\n\x1c\x63om.google.cloud.dataproc.v1B\tJobsProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - 
-_LOGGINGCONFIG_LEVEL = _descriptor.EnumDescriptor( - name="Level", - full_name="google.cloud.dataproc.v1.LoggingConfig.Level", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="LEVEL_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALL", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="TRACE", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DEBUG", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="INFO", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="WARN", index=5, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ERROR", index=6, number=6, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FATAL", index=7, number=7, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="OFF", index=8, number=8, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=465, - serialized_end=577, -) -_sym_db.RegisterEnumDescriptor(_LOGGINGCONFIG_LEVEL) - -_JOBSTATUS_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.cloud.dataproc.v1.JobStatus.State", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="STATE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="PENDING", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SETUP_DONE", index=2, number=8, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", index=3, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CANCEL_PENDING", index=4, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CANCEL_STARTED", index=5, number=7, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CANCELLED", index=6, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DONE", index=7, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ERROR", index=8, number=6, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ATTEMPT_FAILURE", - index=9, - number=9, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3471, - serialized_end=3640, -) -_sym_db.RegisterEnumDescriptor(_JOBSTATUS_STATE) - -_JOBSTATUS_SUBSTATE = _descriptor.EnumDescriptor( - name="Substate", - full_name="google.cloud.dataproc.v1.JobStatus.Substate", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="UNSPECIFIED", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SUBMITTED", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="QUEUED", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="STALE_STATUS", index=3, number=3, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3642, - 
serialized_end=3714, -) -_sym_db.RegisterEnumDescriptor(_JOBSTATUS_SUBSTATE) - -_YARNAPPLICATION_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.cloud.dataproc.v1.YarnApplication.State", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="STATE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="NEW", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="NEW_SAVING", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SUBMITTED", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ACCEPTED", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", index=5, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FINISHED", index=6, number=6, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FAILED", index=7, number=7, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="KILLED", index=8, number=8, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3937, - serialized_end=4072, -) -_sym_db.RegisterEnumDescriptor(_YARNAPPLICATION_STATE) - -_LISTJOBSREQUEST_JOBSTATEMATCHER = _descriptor.EnumDescriptor( - name="JobStateMatcher", - full_name="google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcher", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="ALL", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ACTIVE", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="NON_ACTIVE", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=5582, - serialized_end=5636, -) -_sym_db.RegisterEnumDescriptor(_LISTJOBSREQUEST_JOBSTATEMATCHER) - - -_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY = _descriptor.Descriptor( - name="DriverLogLevelsEntry", - full_name="google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry.value", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=362, - serialized_end=463, -) - -_LOGGINGCONFIG = _descriptor.Descriptor( - name="LoggingConfig", - full_name="google.cloud.dataproc.v1.LoggingConfig", 
- filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="driver_log_levels", - full_name="google.cloud.dataproc.v1.LoggingConfig.driver_log_levels", - index=0, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY], - enum_types=[_LOGGINGCONFIG_LEVEL], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=256, - serialized_end=577, -) - - -_HADOOPJOB_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - full_name="google.cloud.dataproc.v1.HadoopJob.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.HadoopJob.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.HadoopJob.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=890, - serialized_end=939, -) - -_HADOOPJOB = _descriptor.Descriptor( - name="HadoopJob", - full_name="google.cloud.dataproc.v1.HadoopJob", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="main_jar_file_uri", - full_name="google.cloud.dataproc.v1.HadoopJob.main_jar_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="main_class", - full_name="google.cloud.dataproc.v1.HadoopJob.main_class", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="args", - full_name="google.cloud.dataproc.v1.HadoopJob.args", - index=2, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="jar_file_uris", - full_name="google.cloud.dataproc.v1.HadoopJob.jar_file_uris", - index=3, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - 
default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="file_uris", - full_name="google.cloud.dataproc.v1.HadoopJob.file_uris", - index=4, - number=5, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="archive_uris", - full_name="google.cloud.dataproc.v1.HadoopJob.archive_uris", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1.HadoopJob.properties", - index=6, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="logging_config", - full_name="google.cloud.dataproc.v1.HadoopJob.logging_config", - index=7, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_HADOOPJOB_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="driver", - full_name="google.cloud.dataproc.v1.HadoopJob.driver", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=580, - serialized_end=949, -) - - -_SPARKJOB_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - full_name="google.cloud.dataproc.v1.SparkJob.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.SparkJob.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.SparkJob.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=890, - serialized_end=939, -) - -_SPARKJOB = _descriptor.Descriptor( - name="SparkJob", - full_name="google.cloud.dataproc.v1.SparkJob", - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="main_jar_file_uri", - full_name="google.cloud.dataproc.v1.SparkJob.main_jar_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="main_class", - full_name="google.cloud.dataproc.v1.SparkJob.main_class", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="args", - full_name="google.cloud.dataproc.v1.SparkJob.args", - index=2, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="jar_file_uris", - full_name="google.cloud.dataproc.v1.SparkJob.jar_file_uris", - index=3, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="file_uris", - full_name="google.cloud.dataproc.v1.SparkJob.file_uris", - index=4, - number=5, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="archive_uris", - full_name="google.cloud.dataproc.v1.SparkJob.archive_uris", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1.SparkJob.properties", - index=6, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="logging_config", - full_name="google.cloud.dataproc.v1.SparkJob.logging_config", - index=7, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_SPARKJOB_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="driver", - full_name="google.cloud.dataproc.v1.SparkJob.driver", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=952, - 
serialized_end=1319, -) - - -_PYSPARKJOB_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - full_name="google.cloud.dataproc.v1.PySparkJob.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.PySparkJob.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.PySparkJob.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=890, - serialized_end=939, -) - -_PYSPARKJOB = _descriptor.Descriptor( - name="PySparkJob", - full_name="google.cloud.dataproc.v1.PySparkJob", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="main_python_file_uri", - full_name="google.cloud.dataproc.v1.PySparkJob.main_python_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="args", - full_name="google.cloud.dataproc.v1.PySparkJob.args", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="python_file_uris", - full_name="google.cloud.dataproc.v1.PySparkJob.python_file_uris", - index=2, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="jar_file_uris", - full_name="google.cloud.dataproc.v1.PySparkJob.jar_file_uris", - index=3, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="file_uris", - full_name="google.cloud.dataproc.v1.PySparkJob.file_uris", - index=4, - number=5, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="archive_uris", - 
full_name="google.cloud.dataproc.v1.PySparkJob.archive_uris", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1.PySparkJob.properties", - index=6, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="logging_config", - full_name="google.cloud.dataproc.v1.PySparkJob.logging_config", - index=7, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_PYSPARKJOB_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1322, - serialized_end=1698, -) - - -_QUERYLIST = _descriptor.Descriptor( - name="QueryList", - full_name="google.cloud.dataproc.v1.QueryList", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="queries", - full_name="google.cloud.dataproc.v1.QueryList.queries", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1700, - serialized_end=1733, -) - - -_HIVEJOB_SCRIPTVARIABLESENTRY = _descriptor.Descriptor( - name="ScriptVariablesEntry", - full_name="google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2057, - serialized_end=2111, -) - -_HIVEJOB_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - 
full_name="google.cloud.dataproc.v1.HiveJob.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.HiveJob.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.HiveJob.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=890, - serialized_end=939, -) - -_HIVEJOB = _descriptor.Descriptor( - name="HiveJob", - full_name="google.cloud.dataproc.v1.HiveJob", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="query_file_uri", - full_name="google.cloud.dataproc.v1.HiveJob.query_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="query_list", - full_name="google.cloud.dataproc.v1.HiveJob.query_list", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="continue_on_failure", - full_name="google.cloud.dataproc.v1.HiveJob.continue_on_failure", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="script_variables", - full_name="google.cloud.dataproc.v1.HiveJob.script_variables", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1.HiveJob.properties", - index=4, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="jar_file_uris", - full_name="google.cloud.dataproc.v1.HiveJob.jar_file_uris", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_HIVEJOB_SCRIPTVARIABLESENTRY, _HIVEJOB_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="queries", - full_name="google.cloud.dataproc.v1.HiveJob.queries", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=1736, - serialized_end=2173, -) - - -_SPARKSQLJOB_SCRIPTVARIABLESENTRY = _descriptor.Descriptor( - name="ScriptVariablesEntry", - full_name="google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2057, - serialized_end=2111, -) - -_SPARKSQLJOB_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - full_name="google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=890, - serialized_end=939, -) - -_SPARKSQLJOB = _descriptor.Descriptor( - name="SparkSqlJob", - full_name="google.cloud.dataproc.v1.SparkSqlJob", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="query_file_uri", - full_name="google.cloud.dataproc.v1.SparkSqlJob.query_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="query_list", - full_name="google.cloud.dataproc.v1.SparkSqlJob.query_list", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="script_variables", - full_name="google.cloud.dataproc.v1.SparkSqlJob.script_variables", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1.SparkSqlJob.properties", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="jar_file_uris", - full_name="google.cloud.dataproc.v1.SparkSqlJob.jar_file_uris", - index=4, - number=56, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="logging_config", - full_name="google.cloud.dataproc.v1.SparkSqlJob.logging_config", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_SPARKSQLJOB_SCRIPTVARIABLESENTRY, _SPARKSQLJOB_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="queries", - full_name="google.cloud.dataproc.v1.SparkSqlJob.queries", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=2176, - serialized_end=2661, -) - - -_PIGJOB_SCRIPTVARIABLESENTRY = _descriptor.Descriptor( - name="ScriptVariablesEntry", - full_name="google.cloud.dataproc.v1.PigJob.ScriptVariablesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.PigJob.ScriptVariablesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.PigJob.ScriptVariablesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2057, - serialized_end=2111, -) - -_PIGJOB_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - full_name="google.cloud.dataproc.v1.PigJob.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.PigJob.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.PigJob.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=890, - serialized_end=939, -) - -_PIGJOB = _descriptor.Descriptor( - name="PigJob", - full_name="google.cloud.dataproc.v1.PigJob", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="query_file_uri", - full_name="google.cloud.dataproc.v1.PigJob.query_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="query_list", - full_name="google.cloud.dataproc.v1.PigJob.query_list", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="continue_on_failure", - full_name="google.cloud.dataproc.v1.PigJob.continue_on_failure", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="script_variables", - full_name="google.cloud.dataproc.v1.PigJob.script_variables", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1.PigJob.properties", - index=4, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="jar_file_uris", - full_name="google.cloud.dataproc.v1.PigJob.jar_file_uris", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="logging_config", - full_name="google.cloud.dataproc.v1.PigJob.logging_config", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_PIGJOB_SCRIPTVARIABLESENTRY, _PIGJOB_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="queries", - full_name="google.cloud.dataproc.v1.PigJob.queries", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=2664, - serialized_end=3168, -) - - -_JOBPLACEMENT = _descriptor.Descriptor( - name="JobPlacement", - full_name="google.cloud.dataproc.v1.JobPlacement", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1.JobPlacement.cluster_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_uuid", - full_name="google.cloud.dataproc.v1.JobPlacement.cluster_uuid", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3170, - serialized_end=3238, -) - - -_JOBSTATUS = _descriptor.Descriptor( - name="JobStatus", - full_name="google.cloud.dataproc.v1.JobStatus", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.dataproc.v1.JobStatus.state", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="details", - full_name="google.cloud.dataproc.v1.JobStatus.details", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003\340A\001"), - file=DESCRIPTOR, - 
), - _descriptor.FieldDescriptor( - name="state_start_time", - full_name="google.cloud.dataproc.v1.JobStatus.state_start_time", - index=2, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="substate", - full_name="google.cloud.dataproc.v1.JobStatus.substate", - index=3, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_JOBSTATUS_STATE, _JOBSTATUS_SUBSTATE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3241, - serialized_end=3714, -) - - -_JOBREFERENCE = _descriptor.Descriptor( - name="JobReference", - full_name="google.cloud.dataproc.v1.JobReference", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1.JobReference.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_id", - full_name="google.cloud.dataproc.v1.JobReference.job_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3716, - serialized_end=3776, -) - - -_YARNAPPLICATION = _descriptor.Descriptor( - name="YarnApplication", - full_name="google.cloud.dataproc.v1.YarnApplication", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.dataproc.v1.YarnApplication.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.dataproc.v1.YarnApplication.state", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="progress", - full_name="google.cloud.dataproc.v1.YarnApplication.progress", - index=2, - number=3, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="tracking_url", - full_name="google.cloud.dataproc.v1.YarnApplication.tracking_url", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_YARNAPPLICATION_STATE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3779, - serialized_end=4072, -) - - -_JOB_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.cloud.dataproc.v1.Job.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.Job.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.Job.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4991, - serialized_end=5036, -) - -_JOB = _descriptor.Descriptor( - name="Job", - full_name="google.cloud.dataproc.v1.Job", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="reference", - full_name="google.cloud.dataproc.v1.Job.reference", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="placement", - full_name="google.cloud.dataproc.v1.Job.placement", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="hadoop_job", - full_name="google.cloud.dataproc.v1.Job.hadoop_job", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="spark_job", - full_name="google.cloud.dataproc.v1.Job.spark_job", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="pyspark_job", - full_name="google.cloud.dataproc.v1.Job.pyspark_job", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="hive_job", - full_name="google.cloud.dataproc.v1.Job.hive_job", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="pig_job", - full_name="google.cloud.dataproc.v1.Job.pig_job", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="spark_sql_job", - full_name="google.cloud.dataproc.v1.Job.spark_sql_job", - index=7, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status", - full_name="google.cloud.dataproc.v1.Job.status", - index=8, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status_history", - full_name="google.cloud.dataproc.v1.Job.status_history", - index=9, - number=13, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="yarn_applications", - full_name="google.cloud.dataproc.v1.Job.yarn_applications", - index=10, - number=9, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="driver_output_resource_uri", - full_name="google.cloud.dataproc.v1.Job.driver_output_resource_uri", - index=11, - number=17, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="driver_control_files_uri", - full_name="google.cloud.dataproc.v1.Job.driver_control_files_uri", - index=12, - number=15, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, 
- extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.cloud.dataproc.v1.Job.labels", - index=13, - number=18, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="scheduling", - full_name="google.cloud.dataproc.v1.Job.scheduling", - index=14, - number=20, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_uuid", - full_name="google.cloud.dataproc.v1.Job.job_uuid", - index=15, - number=22, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_JOB_LABELSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type_job", - full_name="google.cloud.dataproc.v1.Job.type_job", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=4075, - serialized_end=5048, -) - - -_JOBSCHEDULING = _descriptor.Descriptor( - name="JobScheduling", - full_name="google.cloud.dataproc.v1.JobScheduling", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="max_failures_per_hour", - full_name="google.cloud.dataproc.v1.JobScheduling.max_failures_per_hour", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5050, - serialized_end=5101, -) - - -_SUBMITJOBREQUEST = _descriptor.Descriptor( - name="SubmitJobRequest", - full_name="google.cloud.dataproc.v1.SubmitJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1.SubmitJobRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1.SubmitJobRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job", - 
full_name="google.cloud.dataproc.v1.SubmitJobRequest.job", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="request_id", - full_name="google.cloud.dataproc.v1.SubmitJobRequest.request_id", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5104, - serialized_end=5242, -) - - -_GETJOBREQUEST = _descriptor.Descriptor( - name="GetJobRequest", - full_name="google.cloud.dataproc.v1.GetJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1.GetJobRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1.GetJobRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_id", - full_name="google.cloud.dataproc.v1.GetJobRequest.job_id", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5244, - serialized_end=5326, -) - - -_LISTJOBSREQUEST = _descriptor.Descriptor( - name="ListJobsRequest", - full_name="google.cloud.dataproc.v1.ListJobsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1.ListJobsRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1.ListJobsRequest.region", - index=1, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.dataproc.v1.ListJobsRequest.page_size", - index=2, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.dataproc.v1.ListJobsRequest.page_token", - index=3, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1.ListJobsRequest.cluster_name", - index=4, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_state_matcher", - full_name="google.cloud.dataproc.v1.ListJobsRequest.job_state_matcher", - index=5, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.dataproc.v1.ListJobsRequest.filter", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_LISTJOBSREQUEST_JOBSTATEMATCHER], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5329, - serialized_end=5636, -) - - -_UPDATEJOBREQUEST = _descriptor.Descriptor( - name="UpdateJobRequest", - full_name="google.cloud.dataproc.v1.UpdateJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1.UpdateJobRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1.UpdateJobRequest.region", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_id", - full_name="google.cloud.dataproc.v1.UpdateJobRequest.job_id", - index=2, - number=3, - type=9, - 
cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job", - full_name="google.cloud.dataproc.v1.UpdateJobRequest.job", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.dataproc.v1.UpdateJobRequest.update_mask", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5639, - serialized_end=5827, -) - - -_LISTJOBSRESPONSE = _descriptor.Descriptor( - name="ListJobsResponse", - full_name="google.cloud.dataproc.v1.ListJobsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="jobs", - full_name="google.cloud.dataproc.v1.ListJobsResponse.jobs", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.dataproc.v1.ListJobsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5829, - serialized_end=5927, -) - - -_CANCELJOBREQUEST = _descriptor.Descriptor( - name="CancelJobRequest", - full_name="google.cloud.dataproc.v1.CancelJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1.CancelJobRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1.CancelJobRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - 
name="job_id", - full_name="google.cloud.dataproc.v1.CancelJobRequest.job_id", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5929, - serialized_end=6014, -) - - -_DELETEJOBREQUEST = _descriptor.Descriptor( - name="DeleteJobRequest", - full_name="google.cloud.dataproc.v1.DeleteJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1.DeleteJobRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1.DeleteJobRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_id", - full_name="google.cloud.dataproc.v1.DeleteJobRequest.job_id", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6016, - serialized_end=6101, -) - -_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY.fields_by_name[ - "value" -].enum_type = _LOGGINGCONFIG_LEVEL -_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY.containing_type = _LOGGINGCONFIG -_LOGGINGCONFIG.fields_by_name[ - "driver_log_levels" -].message_type = _LOGGINGCONFIG_DRIVERLOGLEVELSENTRY -_LOGGINGCONFIG_LEVEL.containing_type = _LOGGINGCONFIG -_HADOOPJOB_PROPERTIESENTRY.containing_type = _HADOOPJOB -_HADOOPJOB.fields_by_name["properties"].message_type = _HADOOPJOB_PROPERTIESENTRY -_HADOOPJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG -_HADOOPJOB.oneofs_by_name["driver"].fields.append( - _HADOOPJOB.fields_by_name["main_jar_file_uri"] -) -_HADOOPJOB.fields_by_name[ - "main_jar_file_uri" -].containing_oneof = _HADOOPJOB.oneofs_by_name["driver"] -_HADOOPJOB.oneofs_by_name["driver"].fields.append( - _HADOOPJOB.fields_by_name["main_class"] -) -_HADOOPJOB.fields_by_name["main_class"].containing_oneof = _HADOOPJOB.oneofs_by_name[ - "driver" -] -_SPARKJOB_PROPERTIESENTRY.containing_type = _SPARKJOB -_SPARKJOB.fields_by_name["properties"].message_type = _SPARKJOB_PROPERTIESENTRY -_SPARKJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG -_SPARKJOB.oneofs_by_name["driver"].fields.append( - _SPARKJOB.fields_by_name["main_jar_file_uri"] -) -_SPARKJOB.fields_by_name[ - 
"main_jar_file_uri" -].containing_oneof = _SPARKJOB.oneofs_by_name["driver"] -_SPARKJOB.oneofs_by_name["driver"].fields.append(_SPARKJOB.fields_by_name["main_class"]) -_SPARKJOB.fields_by_name["main_class"].containing_oneof = _SPARKJOB.oneofs_by_name[ - "driver" -] -_PYSPARKJOB_PROPERTIESENTRY.containing_type = _PYSPARKJOB -_PYSPARKJOB.fields_by_name["properties"].message_type = _PYSPARKJOB_PROPERTIESENTRY -_PYSPARKJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG -_HIVEJOB_SCRIPTVARIABLESENTRY.containing_type = _HIVEJOB -_HIVEJOB_PROPERTIESENTRY.containing_type = _HIVEJOB -_HIVEJOB.fields_by_name["query_list"].message_type = _QUERYLIST -_HIVEJOB.fields_by_name["script_variables"].message_type = _HIVEJOB_SCRIPTVARIABLESENTRY -_HIVEJOB.fields_by_name["properties"].message_type = _HIVEJOB_PROPERTIESENTRY -_HIVEJOB.oneofs_by_name["queries"].fields.append( - _HIVEJOB.fields_by_name["query_file_uri"] -) -_HIVEJOB.fields_by_name["query_file_uri"].containing_oneof = _HIVEJOB.oneofs_by_name[ - "queries" -] -_HIVEJOB.oneofs_by_name["queries"].fields.append(_HIVEJOB.fields_by_name["query_list"]) -_HIVEJOB.fields_by_name["query_list"].containing_oneof = _HIVEJOB.oneofs_by_name[ - "queries" -] -_SPARKSQLJOB_SCRIPTVARIABLESENTRY.containing_type = _SPARKSQLJOB -_SPARKSQLJOB_PROPERTIESENTRY.containing_type = _SPARKSQLJOB -_SPARKSQLJOB.fields_by_name["query_list"].message_type = _QUERYLIST -_SPARKSQLJOB.fields_by_name[ - "script_variables" -].message_type = _SPARKSQLJOB_SCRIPTVARIABLESENTRY -_SPARKSQLJOB.fields_by_name["properties"].message_type = _SPARKSQLJOB_PROPERTIESENTRY -_SPARKSQLJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG -_SPARKSQLJOB.oneofs_by_name["queries"].fields.append( - _SPARKSQLJOB.fields_by_name["query_file_uri"] -) -_SPARKSQLJOB.fields_by_name[ - "query_file_uri" -].containing_oneof = _SPARKSQLJOB.oneofs_by_name["queries"] -_SPARKSQLJOB.oneofs_by_name["queries"].fields.append( - _SPARKSQLJOB.fields_by_name["query_list"] -) -_SPARKSQLJOB.fields_by_name[ - "query_list" -].containing_oneof = _SPARKSQLJOB.oneofs_by_name["queries"] -_PIGJOB_SCRIPTVARIABLESENTRY.containing_type = _PIGJOB -_PIGJOB_PROPERTIESENTRY.containing_type = _PIGJOB -_PIGJOB.fields_by_name["query_list"].message_type = _QUERYLIST -_PIGJOB.fields_by_name["script_variables"].message_type = _PIGJOB_SCRIPTVARIABLESENTRY -_PIGJOB.fields_by_name["properties"].message_type = _PIGJOB_PROPERTIESENTRY -_PIGJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG -_PIGJOB.oneofs_by_name["queries"].fields.append( - _PIGJOB.fields_by_name["query_file_uri"] -) -_PIGJOB.fields_by_name["query_file_uri"].containing_oneof = _PIGJOB.oneofs_by_name[ - "queries" -] -_PIGJOB.oneofs_by_name["queries"].fields.append(_PIGJOB.fields_by_name["query_list"]) -_PIGJOB.fields_by_name["query_list"].containing_oneof = _PIGJOB.oneofs_by_name[ - "queries" -] -_JOBSTATUS.fields_by_name["state"].enum_type = _JOBSTATUS_STATE -_JOBSTATUS.fields_by_name[ - "state_start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_JOBSTATUS.fields_by_name["substate"].enum_type = _JOBSTATUS_SUBSTATE -_JOBSTATUS_STATE.containing_type = _JOBSTATUS -_JOBSTATUS_SUBSTATE.containing_type = _JOBSTATUS -_YARNAPPLICATION.fields_by_name["state"].enum_type = _YARNAPPLICATION_STATE -_YARNAPPLICATION_STATE.containing_type = _YARNAPPLICATION -_JOB_LABELSENTRY.containing_type = _JOB -_JOB.fields_by_name["reference"].message_type = _JOBREFERENCE -_JOB.fields_by_name["placement"].message_type = _JOBPLACEMENT 
-_JOB.fields_by_name["hadoop_job"].message_type = _HADOOPJOB -_JOB.fields_by_name["spark_job"].message_type = _SPARKJOB -_JOB.fields_by_name["pyspark_job"].message_type = _PYSPARKJOB -_JOB.fields_by_name["hive_job"].message_type = _HIVEJOB -_JOB.fields_by_name["pig_job"].message_type = _PIGJOB -_JOB.fields_by_name["spark_sql_job"].message_type = _SPARKSQLJOB -_JOB.fields_by_name["status"].message_type = _JOBSTATUS -_JOB.fields_by_name["status_history"].message_type = _JOBSTATUS -_JOB.fields_by_name["yarn_applications"].message_type = _YARNAPPLICATION -_JOB.fields_by_name["labels"].message_type = _JOB_LABELSENTRY -_JOB.fields_by_name["scheduling"].message_type = _JOBSCHEDULING -_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["hadoop_job"]) -_JOB.fields_by_name["hadoop_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] -_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["spark_job"]) -_JOB.fields_by_name["spark_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] -_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["pyspark_job"]) -_JOB.fields_by_name["pyspark_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] -_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["hive_job"]) -_JOB.fields_by_name["hive_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] -_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["pig_job"]) -_JOB.fields_by_name["pig_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] -_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["spark_sql_job"]) -_JOB.fields_by_name["spark_sql_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] -_SUBMITJOBREQUEST.fields_by_name["job"].message_type = _JOB -_LISTJOBSREQUEST.fields_by_name[ - "job_state_matcher" -].enum_type = _LISTJOBSREQUEST_JOBSTATEMATCHER -_LISTJOBSREQUEST_JOBSTATEMATCHER.containing_type = _LISTJOBSREQUEST -_UPDATEJOBREQUEST.fields_by_name["job"].message_type = _JOB -_UPDATEJOBREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTJOBSRESPONSE.fields_by_name["jobs"].message_type = _JOB -DESCRIPTOR.message_types_by_name["LoggingConfig"] = _LOGGINGCONFIG -DESCRIPTOR.message_types_by_name["HadoopJob"] = _HADOOPJOB -DESCRIPTOR.message_types_by_name["SparkJob"] = _SPARKJOB -DESCRIPTOR.message_types_by_name["PySparkJob"] = _PYSPARKJOB -DESCRIPTOR.message_types_by_name["QueryList"] = _QUERYLIST -DESCRIPTOR.message_types_by_name["HiveJob"] = _HIVEJOB -DESCRIPTOR.message_types_by_name["SparkSqlJob"] = _SPARKSQLJOB -DESCRIPTOR.message_types_by_name["PigJob"] = _PIGJOB -DESCRIPTOR.message_types_by_name["JobPlacement"] = _JOBPLACEMENT -DESCRIPTOR.message_types_by_name["JobStatus"] = _JOBSTATUS -DESCRIPTOR.message_types_by_name["JobReference"] = _JOBREFERENCE -DESCRIPTOR.message_types_by_name["YarnApplication"] = _YARNAPPLICATION -DESCRIPTOR.message_types_by_name["Job"] = _JOB -DESCRIPTOR.message_types_by_name["JobScheduling"] = _JOBSCHEDULING -DESCRIPTOR.message_types_by_name["SubmitJobRequest"] = _SUBMITJOBREQUEST -DESCRIPTOR.message_types_by_name["GetJobRequest"] = _GETJOBREQUEST -DESCRIPTOR.message_types_by_name["ListJobsRequest"] = _LISTJOBSREQUEST -DESCRIPTOR.message_types_by_name["UpdateJobRequest"] = _UPDATEJOBREQUEST -DESCRIPTOR.message_types_by_name["ListJobsResponse"] = _LISTJOBSRESPONSE -DESCRIPTOR.message_types_by_name["CancelJobRequest"] = _CANCELJOBREQUEST -DESCRIPTOR.message_types_by_name["DeleteJobRequest"] = _DELETEJOBREQUEST 
-_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LoggingConfig = _reflection.GeneratedProtocolMessageType( - "LoggingConfig", - (_message.Message,), - dict( - DriverLogLevelsEntry=_reflection.GeneratedProtocolMessageType( - "DriverLogLevelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry) - ), - ), - DESCRIPTOR=_LOGGINGCONFIG, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""The runtime logging config of the job. - - - Attributes: - driver_log_levels: - The per-package log levels for the driver. This may include - "root" package name to configure rootLogger. Examples: - 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.LoggingConfig) - ), -) -_sym_db.RegisterMessage(LoggingConfig) -_sym_db.RegisterMessage(LoggingConfig.DriverLogLevelsEntry) - -HadoopJob = _reflection.GeneratedProtocolMessageType( - "HadoopJob", - (_message.Message,), - dict( - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_HADOOPJOB_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.HadoopJob.PropertiesEntry) - ), - ), - DESCRIPTOR=_HADOOPJOB, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Dataproc job for running `Apache Hadoop - MapReduce `__ - jobs on `Apache Hadoop - YARN `__. - - - Attributes: - driver: - Required. Indicates the location of the driver's main class. - Specify either the jar file that contains the main class or - the main class name. To specify both, add the jar file to - ``jar_file_uris``, and then specify the main class name in - this property. - main_jar_file_uri: - The HCFS URI of the jar file containing the main class. - Examples: 'gs://foo-bucket/analytics-binaries/extract-useful- - metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' - 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce- - examples.jar' - main_class: - The name of the driver's main class. The jar file containing - the class must be in the default CLASSPATH or specified in - ``jar_file_uris``. - args: - Optional. The arguments to pass to the driver. Do not include - arguments, such as ``-libjars`` or ``-Dfoo=bar``, that can be - set as job properties, since a collision may occur that causes - an incorrect job submission. - jar_file_uris: - Optional. Jar file URIs to add to the CLASSPATHs of the Hadoop - driver and tasks. - file_uris: - Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to - be copied to the working directory of Hadoop drivers and - distributed tasks. Useful for naively parallel tasks. - archive_uris: - Optional. HCFS URIs of archives to be extracted in the working - directory of Hadoop drivers and tasks. Supported file types: - .jar, .tar, .tar.gz, .tgz, or .zip. - properties: - Optional. A mapping of property names to values, used to - configure Hadoop. Properties that conflict with values set by - the Dataproc API may be overwritten. Can include properties - set in /etc/hadoop/conf/\*-site and classes in user code. - logging_config: - Optional. The runtime log config for job execution. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.HadoopJob) - ), -) -_sym_db.RegisterMessage(HadoopJob) -_sym_db.RegisterMessage(HadoopJob.PropertiesEntry) - -SparkJob = _reflection.GeneratedProtocolMessageType( - "SparkJob", - (_message.Message,), - dict( - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_SPARKJOB_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SparkJob.PropertiesEntry) - ), - ), - DESCRIPTOR=_SPARKJOB, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Dataproc job for running `Apache - Spark `__ applications on YARN. - - - Attributes: - driver: - Required. The specification of the main method to call to - drive the job. Specify either the jar file that contains the - main class or the main class name. To pass both a main jar and - a main class in that jar, add the jar to - ``CommonJob.jar_file_uris``, and then specify the main class - name in ``main_class``. - main_jar_file_uri: - The HCFS URI of the jar file that contains the main class. - main_class: - The name of the driver's main class. The jar file that - contains the class must be in the default CLASSPATH or - specified in ``jar_file_uris``. - args: - Optional. The arguments to pass to the driver. Do not include - arguments, such as ``--conf``, that can be set as job - properties, since a collision may occur that causes an - incorrect job submission. - jar_file_uris: - Optional. HCFS URIs of jar files to add to the CLASSPATHs of - the Spark driver and tasks. - file_uris: - Optional. HCFS URIs of files to be copied to the working - directory of Spark drivers and distributed tasks. Useful for - naively parallel tasks. - archive_uris: - Optional. HCFS URIs of archives to be extracted in the working - directory of Spark drivers and tasks. Supported file types: - .jar, .tar, .tar.gz, .tgz, and .zip. - properties: - Optional. A mapping of property names to values, used to - configure Spark. Properties that conflict with values set by - the Dataproc API may be overwritten. Can include properties - set in /etc/spark/conf/spark-defaults.conf and classes in user - code. - logging_config: - Optional. The runtime log config for job execution. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SparkJob) - ), -) -_sym_db.RegisterMessage(SparkJob) -_sym_db.RegisterMessage(SparkJob.PropertiesEntry) - -PySparkJob = _reflection.GeneratedProtocolMessageType( - "PySparkJob", - (_message.Message,), - dict( - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_PYSPARKJOB_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.PySparkJob.PropertiesEntry) - ), - ), - DESCRIPTOR=_PYSPARKJOB, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Dataproc job for running `Apache - PySpark `__ - applications on YARN. - - - Attributes: - main_python_file_uri: - Required. The HCFS URI of the main Python file to use as the - driver. Must be a .py file. - args: - Optional. The arguments to pass to the driver. Do not include - arguments, such as ``--conf``, that can be set as job - properties, since a collision may occur that causes an - incorrect job submission. - python_file_uris: - Optional. 
HCFS file URIs of Python files to pass to the - PySpark framework. Supported file types: .py, .egg, and .zip. - jar_file_uris: - Optional. HCFS URIs of jar files to add to the CLASSPATHs of - the Python driver and tasks. - file_uris: - Optional. HCFS URIs of files to be copied to the working - directory of Python drivers and distributed tasks. Useful for - naively parallel tasks. - archive_uris: - Optional. HCFS URIs of archives to be extracted in the working - directory of .jar, .tar, .tar.gz, .tgz, and .zip. - properties: - Optional. A mapping of property names to values, used to - configure PySpark. Properties that conflict with values set by - the Dataproc API may be overwritten. Can include properties - set in /etc/spark/conf/spark-defaults.conf and classes in user - code. - logging_config: - Optional. The runtime log config for job execution. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.PySparkJob) - ), -) -_sym_db.RegisterMessage(PySparkJob) -_sym_db.RegisterMessage(PySparkJob.PropertiesEntry) - -QueryList = _reflection.GeneratedProtocolMessageType( - "QueryList", - (_message.Message,), - dict( - DESCRIPTOR=_QUERYLIST, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A list of queries to run on a cluster. - - - Attributes: - queries: - Required. The queries to execute. You do not need to terminate - a query with a semicolon. Multiple queries can be specified in - one string by separating each with a semicolon. Here is an - example of an Cloud Dataproc API snippet that uses a QueryList - to specify a HiveJob: :: "hiveJob": { "queryList": - { "queries": [ "query1", "query2", - "query3;query4", ] } } - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.QueryList) - ), -) -_sym_db.RegisterMessage(QueryList) - -HiveJob = _reflection.GeneratedProtocolMessageType( - "HiveJob", - (_message.Message,), - dict( - ScriptVariablesEntry=_reflection.GeneratedProtocolMessageType( - "ScriptVariablesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_HIVEJOB_SCRIPTVARIABLESENTRY, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntry) - ), - ), - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_HIVEJOB_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.HiveJob.PropertiesEntry) - ), - ), - DESCRIPTOR=_HIVEJOB, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Dataproc job for running `Apache - Hive `__ queries on YARN. - - - Attributes: - queries: - Required. The sequence of Hive queries to execute, specified - as either an HCFS file URI or a list of queries. - query_file_uri: - The HCFS URI of the script that contains Hive queries. - query_list: - A list of queries. - continue_on_failure: - Optional. Whether to continue executing queries if a query - fails. The default value is ``false``. Setting to ``true`` can - be useful when executing independent parallel queries. - script_variables: - Optional. Mapping of query variable names to values - (equivalent to the Hive command: ``SET name="value";``). - properties: - Optional. A mapping of property names and values, used to - configure Hive. Properties that conflict with values set by - the Dataproc API may be overwritten. 
Can include properties - set in /etc/hadoop/conf/\*-site.xml, /etc/hive/conf/hive- - site.xml, and classes in user code. - jar_file_uris: - Optional. HCFS URIs of jar files to add to the CLASSPATH of - the Hive server and Hadoop MapReduce (MR) tasks. Can contain - Hive SerDes and UDFs. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.HiveJob) - ), -) -_sym_db.RegisterMessage(HiveJob) -_sym_db.RegisterMessage(HiveJob.ScriptVariablesEntry) -_sym_db.RegisterMessage(HiveJob.PropertiesEntry) - -SparkSqlJob = _reflection.GeneratedProtocolMessageType( - "SparkSqlJob", - (_message.Message,), - dict( - ScriptVariablesEntry=_reflection.GeneratedProtocolMessageType( - "ScriptVariablesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_SPARKSQLJOB_SCRIPTVARIABLESENTRY, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntry) - ), - ), - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_SPARKSQLJOB_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntry) - ), - ), - DESCRIPTOR=_SPARKSQLJOB, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Dataproc job for running `Apache Spark - SQL `__ queries. - - - Attributes: - queries: - Required. The sequence of Spark SQL queries to execute, - specified as either an HCFS file URI or as a list of queries. - query_file_uri: - The HCFS URI of the script that contains SQL queries. - query_list: - A list of queries. - script_variables: - Optional. Mapping of query variable names to values - (equivalent to the Spark SQL command: SET ``name="value";``). - properties: - Optional. A mapping of property names to values, used to - configure Spark SQL's SparkConf. Properties that conflict with - values set by the Dataproc API may be overwritten. - jar_file_uris: - Optional. HCFS URIs of jar files to be added to the Spark - CLASSPATH. - logging_config: - Optional. The runtime log config for job execution. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SparkSqlJob) - ), -) -_sym_db.RegisterMessage(SparkSqlJob) -_sym_db.RegisterMessage(SparkSqlJob.ScriptVariablesEntry) -_sym_db.RegisterMessage(SparkSqlJob.PropertiesEntry) - -PigJob = _reflection.GeneratedProtocolMessageType( - "PigJob", - (_message.Message,), - dict( - ScriptVariablesEntry=_reflection.GeneratedProtocolMessageType( - "ScriptVariablesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_PIGJOB_SCRIPTVARIABLESENTRY, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.PigJob.ScriptVariablesEntry) - ), - ), - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_PIGJOB_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.PigJob.PropertiesEntry) - ), - ), - DESCRIPTOR=_PIGJOB, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Dataproc job for running `Apache - Pig `__ queries on YARN. - - - Attributes: - queries: - Required. The sequence of Pig queries to execute, specified as - an HCFS file URI or a list of queries. - query_file_uri: - The HCFS URI of the script that contains the Pig queries. - query_list: - A list of queries. 
- continue_on_failure: - Optional. Whether to continue executing queries if a query - fails. The default value is ``false``. Setting to ``true`` can - be useful when executing independent parallel queries. - script_variables: - Optional. Mapping of query variable names to values - (equivalent to the Pig command: ``name=[value]``). - properties: - Optional. A mapping of property names to values, used to - configure Pig. Properties that conflict with values set by the - Dataproc API may be overwritten. Can include properties set in - /etc/hadoop/conf/\*-site.xml, /etc/pig/conf/pig.properties, - and classes in user code. - jar_file_uris: - Optional. HCFS URIs of jar files to add to the CLASSPATH of - the Pig Client and Hadoop MapReduce (MR) tasks. Can contain - Pig UDFs. - logging_config: - Optional. The runtime log config for job execution. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.PigJob) - ), -) -_sym_db.RegisterMessage(PigJob) -_sym_db.RegisterMessage(PigJob.ScriptVariablesEntry) -_sym_db.RegisterMessage(PigJob.PropertiesEntry) - -JobPlacement = _reflection.GeneratedProtocolMessageType( - "JobPlacement", - (_message.Message,), - dict( - DESCRIPTOR=_JOBPLACEMENT, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""Dataproc job config. - - - Attributes: - cluster_name: - Required. The name of the cluster where the job will be - submitted. - cluster_uuid: - Output only. A cluster UUID generated by the Dataproc service - when the job is submitted. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.JobPlacement) - ), -) -_sym_db.RegisterMessage(JobPlacement) - -JobStatus = _reflection.GeneratedProtocolMessageType( - "JobStatus", - (_message.Message,), - dict( - DESCRIPTOR=_JOBSTATUS, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""Dataproc job status. - - - Attributes: - state: - Output only. A state message specifying the overall job state. - details: - Optional. Output only. Job state details, such as an error - description if the state is ERROR. - state_start_time: - Output only. The time when this state was entered. - substate: - Output only. Additional state information, which includes - status reported by the agent. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.JobStatus) - ), -) -_sym_db.RegisterMessage(JobStatus) - -JobReference = _reflection.GeneratedProtocolMessageType( - "JobReference", - (_message.Message,), - dict( - DESCRIPTOR=_JOBREFERENCE, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""Encapsulates the full scoping used to reference a job. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - job belongs to. - job_id: - Optional. The job ID, which must be unique within the project. - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (\_), or hyphens (-). The maximum length is 100 - characters. If not specified by the caller, the job ID will - be provided by the server. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.JobReference) - ), -) -_sym_db.RegisterMessage(JobReference) - -YarnApplication = _reflection.GeneratedProtocolMessageType( - "YarnApplication", - (_message.Message,), - dict( - DESCRIPTOR=_YARNAPPLICATION, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A YARN application created by a job. Application - information is a subset of - org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto. 
- - **Beta Feature**: This report is available for testing purposes only. It - may be changed before final release. - - - Attributes: - name: - Required. The application name. - state: - Required. The application state. - progress: - Required. The numerical progress of the application, from 1 to - 100. - tracking_url: - Optional. The HTTP URL of the ApplicationMaster, - HistoryServer, or TimelineServer that provides application- - specific information. The URL uses the internal hostname, and - requires a proxy server for resolution and, possibly, access. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.YarnApplication) - ), -) -_sym_db.RegisterMessage(YarnApplication) - -Job = _reflection.GeneratedProtocolMessageType( - "Job", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_JOB_LABELSENTRY, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.Job.LabelsEntry) - ), - ), - DESCRIPTOR=_JOB, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Dataproc job resource. - - - Attributes: - reference: - Optional. The fully qualified reference to the job, which can - be used to obtain the equivalent REST path of the job - resource. If this property is not specified when a job is - created, the server generates a job\_id. - placement: - Required. Job information, including how, when, and where to - run the job. - type_job: - Required. The application/framework-specific portion of the - job. - hadoop_job: - Job is a Hadoop job. - spark_job: - Job is a Spark job. - pyspark_job: - Job is a Pyspark job. - hive_job: - Job is a Hive job. - pig_job: - Job is a Pig job. - spark_sql_job: - Job is a SparkSql job. - status: - Output only. The job status. Additional application-specific - status information may be contained in the type\_job and - yarn\_applications fields. - status_history: - Output only. The previous job status. - yarn_applications: - Output only. The collection of YARN applications spun up by - this job. **Beta** Feature: This report is available for - testing purposes only. It may be changed before final release. - driver_output_resource_uri: - Output only. A URI pointing to the location of the stdout of - the job's driver program. - driver_control_files_uri: - Output only. If present, the location of miscellaneous control - files which may be used as part of job setup and handling. If - not present, control files may be placed in the same location - as ``driver_output_uri``. - labels: - Optional. The labels to associate with this job. Label - **keys** must contain 1 to 63 characters, and must conform to - `RFC 1035 `__. Label - **values** may be empty, but, if present, must contain 1 to 63 - characters, and must conform to `RFC 1035 - `__. No more than 32 - labels can be associated with a job. - scheduling: - Optional. Job scheduling configuration. - job_uuid: - Output only. A UUID that uniquely identifies a job within the - project over time. This is in contrast to a user-settable - reference.job\_id that may be reused over time. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.Job) - ), -) -_sym_db.RegisterMessage(Job) -_sym_db.RegisterMessage(Job.LabelsEntry) - -JobScheduling = _reflection.GeneratedProtocolMessageType( - "JobScheduling", - (_message.Message,), - dict( - DESCRIPTOR=_JOBSCHEDULING, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""Job scheduling options. - - - Attributes: - max_failures_per_hour: - Optional. Maximum number of times per hour a driver may be - restarted as a result of driver terminating with non-zero code - before job is reported failed. A job may be reported as - thrashing if driver exits with non-zero code 4 times within 10 - minute window. Maximum value is 10. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.JobScheduling) - ), -) -_sym_db.RegisterMessage(JobScheduling) - -SubmitJobRequest = _reflection.GeneratedProtocolMessageType( - "SubmitJobRequest", - (_message.Message,), - dict( - DESCRIPTOR=_SUBMITJOBREQUEST, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A request to submit a job. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - job belongs to. - region: - Required. The Dataproc region in which to handle the request. - job: - Required. The job resource. - request_id: - Optional. A unique id used to identify the request. If the - server receives two - [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] - requests with the same id, then the second request will be - ignored and the first [Job][google.cloud.dataproc.v1.Job] - created and stored in the backend is returned. It is - recommended to always set this value to a `UUID `__. The id - must contain only letters (a-z, A-Z), numbers (0-9), - underscores (\_), and hyphens (-). The maximum length is 40 - characters. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SubmitJobRequest) - ), -) -_sym_db.RegisterMessage(SubmitJobRequest) - -GetJobRequest = _reflection.GeneratedProtocolMessageType( - "GetJobRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETJOBREQUEST, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A request to get the resource representation for a job in - a project. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - job belongs to. - region: - Required. The Dataproc region in which to handle the request. - job_id: - Required. The job ID. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.GetJobRequest) - ), -) -_sym_db.RegisterMessage(GetJobRequest) - -ListJobsRequest = _reflection.GeneratedProtocolMessageType( - "ListJobsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTJOBSREQUEST, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A request to list jobs in a project. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - job belongs to. - region: - Required. The Dataproc region in which to handle the request. - page_size: - Optional. The number of results to return in each response. - page_token: - Optional. The page token, returned by a previous call, to - request the next page of results. - cluster_name: - Optional. If set, the returned jobs list includes only jobs - that were submitted to the named cluster. - job_state_matcher: - Optional. Specifies enumerated categories of jobs to list. - (default = match ALL jobs). 
If ``filter`` is provided, - ``jobStateMatcher`` will be ignored. - filter: - Optional. A filter constraining the jobs to list. Filters are - case-sensitive and have the following syntax: [field = value] - AND [field [= value]] ... where **field** is ``status.state`` - or ``labels.[KEY]``, and ``[KEY]`` is a label key. **value** - can be ``*`` to match all values. ``status.state`` can be - either ``ACTIVE`` or ``NON_ACTIVE``. Only the logical ``AND`` - operator is supported; space-separated items are treated as - having an implicit ``AND`` operator. Example filter: - status.state = ACTIVE AND labels.env = staging AND - labels.starred = \* - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ListJobsRequest) - ), -) -_sym_db.RegisterMessage(ListJobsRequest) - -UpdateJobRequest = _reflection.GeneratedProtocolMessageType( - "UpdateJobRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATEJOBREQUEST, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A request to update a job. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - job belongs to. - region: - Required. The Dataproc region in which to handle the request. - job_id: - Required. The job ID. - job: - Required. The changes to the job. - update_mask: - Required. Specifies the path, relative to Job, of the field to - update. For example, to update the labels of a Job the - update\_mask parameter would be specified as labels, and the - ``PATCH`` request body would specify the new value. Note: - Currently, labels is the only field that can be updated. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.UpdateJobRequest) - ), -) -_sym_db.RegisterMessage(UpdateJobRequest) - -ListJobsResponse = _reflection.GeneratedProtocolMessageType( - "ListJobsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTJOBSRESPONSE, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A list of jobs in a project. - - - Attributes: - jobs: - Output only. Jobs list. - next_page_token: - Optional. This token is included in the response if there are - more results to fetch. To fetch additional results, provide - this value as the ``page_token`` in a subsequent - ListJobsRequest. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ListJobsResponse) - ), -) -_sym_db.RegisterMessage(ListJobsResponse) - -CancelJobRequest = _reflection.GeneratedProtocolMessageType( - "CancelJobRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CANCELJOBREQUEST, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A request to cancel a job. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - job belongs to. - region: - Required. The Dataproc region in which to handle the request. - job_id: - Required. The job ID. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.CancelJobRequest) - ), -) -_sym_db.RegisterMessage(CancelJobRequest) - -DeleteJobRequest = _reflection.GeneratedProtocolMessageType( - "DeleteJobRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEJOBREQUEST, - __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A request to delete a job. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - job belongs to. - region: - Required. The Dataproc region in which to handle the request. - job_id: - Required. The job ID. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.DeleteJobRequest) - ), -) -_sym_db.RegisterMessage(DeleteJobRequest) - - -DESCRIPTOR._options = None -_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY._options = None -_HADOOPJOB_PROPERTIESENTRY._options = None -_HADOOPJOB.fields_by_name["args"]._options = None -_HADOOPJOB.fields_by_name["jar_file_uris"]._options = None -_HADOOPJOB.fields_by_name["file_uris"]._options = None -_HADOOPJOB.fields_by_name["archive_uris"]._options = None -_HADOOPJOB.fields_by_name["properties"]._options = None -_HADOOPJOB.fields_by_name["logging_config"]._options = None -_SPARKJOB_PROPERTIESENTRY._options = None -_SPARKJOB.fields_by_name["args"]._options = None -_SPARKJOB.fields_by_name["jar_file_uris"]._options = None -_SPARKJOB.fields_by_name["file_uris"]._options = None -_SPARKJOB.fields_by_name["archive_uris"]._options = None -_SPARKJOB.fields_by_name["properties"]._options = None -_SPARKJOB.fields_by_name["logging_config"]._options = None -_PYSPARKJOB_PROPERTIESENTRY._options = None -_PYSPARKJOB.fields_by_name["main_python_file_uri"]._options = None -_PYSPARKJOB.fields_by_name["args"]._options = None -_PYSPARKJOB.fields_by_name["python_file_uris"]._options = None -_PYSPARKJOB.fields_by_name["jar_file_uris"]._options = None -_PYSPARKJOB.fields_by_name["file_uris"]._options = None -_PYSPARKJOB.fields_by_name["archive_uris"]._options = None -_PYSPARKJOB.fields_by_name["properties"]._options = None -_PYSPARKJOB.fields_by_name["logging_config"]._options = None -_QUERYLIST.fields_by_name["queries"]._options = None -_HIVEJOB_SCRIPTVARIABLESENTRY._options = None -_HIVEJOB_PROPERTIESENTRY._options = None -_HIVEJOB.fields_by_name["continue_on_failure"]._options = None -_HIVEJOB.fields_by_name["script_variables"]._options = None -_HIVEJOB.fields_by_name["properties"]._options = None -_HIVEJOB.fields_by_name["jar_file_uris"]._options = None -_SPARKSQLJOB_SCRIPTVARIABLESENTRY._options = None -_SPARKSQLJOB_PROPERTIESENTRY._options = None -_SPARKSQLJOB.fields_by_name["script_variables"]._options = None -_SPARKSQLJOB.fields_by_name["properties"]._options = None -_SPARKSQLJOB.fields_by_name["jar_file_uris"]._options = None -_SPARKSQLJOB.fields_by_name["logging_config"]._options = None -_PIGJOB_SCRIPTVARIABLESENTRY._options = None -_PIGJOB_PROPERTIESENTRY._options = None -_PIGJOB.fields_by_name["continue_on_failure"]._options = None -_PIGJOB.fields_by_name["script_variables"]._options = None -_PIGJOB.fields_by_name["properties"]._options = None -_PIGJOB.fields_by_name["jar_file_uris"]._options = None -_PIGJOB.fields_by_name["logging_config"]._options = None -_JOBPLACEMENT.fields_by_name["cluster_name"]._options = None -_JOBPLACEMENT.fields_by_name["cluster_uuid"]._options = None -_JOBSTATUS.fields_by_name["state"]._options = None -_JOBSTATUS.fields_by_name["details"]._options = None -_JOBSTATUS.fields_by_name["state_start_time"]._options = None -_JOBSTATUS.fields_by_name["substate"]._options = None -_JOBREFERENCE.fields_by_name["project_id"]._options = None -_JOBREFERENCE.fields_by_name["job_id"]._options = None -_YARNAPPLICATION.fields_by_name["name"]._options = None -_YARNAPPLICATION.fields_by_name["state"]._options = None -_YARNAPPLICATION.fields_by_name["progress"]._options = None -_YARNAPPLICATION.fields_by_name["tracking_url"]._options = None -_JOB_LABELSENTRY._options = None -_JOB.fields_by_name["reference"]._options = None -_JOB.fields_by_name["placement"]._options = None -_JOB.fields_by_name["status"]._options = None 
-_JOB.fields_by_name["status_history"]._options = None -_JOB.fields_by_name["yarn_applications"]._options = None -_JOB.fields_by_name["driver_output_resource_uri"]._options = None -_JOB.fields_by_name["driver_control_files_uri"]._options = None -_JOB.fields_by_name["labels"]._options = None -_JOB.fields_by_name["scheduling"]._options = None -_JOB.fields_by_name["job_uuid"]._options = None -_JOBSCHEDULING.fields_by_name["max_failures_per_hour"]._options = None -_SUBMITJOBREQUEST.fields_by_name["project_id"]._options = None -_SUBMITJOBREQUEST.fields_by_name["region"]._options = None -_SUBMITJOBREQUEST.fields_by_name["job"]._options = None -_SUBMITJOBREQUEST.fields_by_name["request_id"]._options = None -_GETJOBREQUEST.fields_by_name["project_id"]._options = None -_GETJOBREQUEST.fields_by_name["region"]._options = None -_GETJOBREQUEST.fields_by_name["job_id"]._options = None -_LISTJOBSREQUEST.fields_by_name["project_id"]._options = None -_LISTJOBSREQUEST.fields_by_name["region"]._options = None -_LISTJOBSREQUEST.fields_by_name["page_size"]._options = None -_LISTJOBSREQUEST.fields_by_name["page_token"]._options = None -_LISTJOBSREQUEST.fields_by_name["cluster_name"]._options = None -_LISTJOBSREQUEST.fields_by_name["job_state_matcher"]._options = None -_LISTJOBSREQUEST.fields_by_name["filter"]._options = None -_UPDATEJOBREQUEST.fields_by_name["project_id"]._options = None -_UPDATEJOBREQUEST.fields_by_name["region"]._options = None -_UPDATEJOBREQUEST.fields_by_name["job_id"]._options = None -_UPDATEJOBREQUEST.fields_by_name["job"]._options = None -_UPDATEJOBREQUEST.fields_by_name["update_mask"]._options = None -_LISTJOBSRESPONSE.fields_by_name["jobs"]._options = None -_LISTJOBSRESPONSE.fields_by_name["next_page_token"]._options = None -_CANCELJOBREQUEST.fields_by_name["project_id"]._options = None -_CANCELJOBREQUEST.fields_by_name["region"]._options = None -_CANCELJOBREQUEST.fields_by_name["job_id"]._options = None -_DELETEJOBREQUEST.fields_by_name["project_id"]._options = None -_DELETEJOBREQUEST.fields_by_name["region"]._options = None -_DELETEJOBREQUEST.fields_by_name["job_id"]._options = None - -_JOBCONTROLLER = _descriptor.ServiceDescriptor( - name="JobController", - full_name="google.cloud.dataproc.v1.JobController", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" - ), - serialized_start=6104, - serialized_end=7283, - methods=[ - _descriptor.MethodDescriptor( - name="SubmitJob", - full_name="google.cloud.dataproc.v1.JobController.SubmitJob", - index=0, - containing_service=None, - input_type=_SUBMITJOBREQUEST, - output_type=_JOB, - serialized_options=_b( - '\202\323\344\223\002;"6/v1/projects/{project_id}/regions/{region}/jobs:submit:\001*\332A\025project_id,region,job' - ), - ), - _descriptor.MethodDescriptor( - name="GetJob", - full_name="google.cloud.dataproc.v1.JobController.GetJob", - index=1, - containing_service=None, - input_type=_GETJOBREQUEST, - output_type=_JOB, - serialized_options=_b( - "\202\323\344\223\002:\0228/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\332A\030project_id,region,job_id" - ), - ), - _descriptor.MethodDescriptor( - name="ListJobs", - full_name="google.cloud.dataproc.v1.JobController.ListJobs", - index=2, - containing_service=None, - input_type=_LISTJOBSREQUEST, - output_type=_LISTJOBSRESPONSE, - serialized_options=_b( - "\202\323\344\223\0021\022//v1/projects/{project_id}/regions/{region}/jobs\332A\021project_id,region\332A\030project_id,region,filter" - 
), - ), - _descriptor.MethodDescriptor( - name="UpdateJob", - full_name="google.cloud.dataproc.v1.JobController.UpdateJob", - index=3, - containing_service=None, - input_type=_UPDATEJOBREQUEST, - output_type=_JOB, - serialized_options=_b( - "\202\323\344\223\002?28/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:\003job" - ), - ), - _descriptor.MethodDescriptor( - name="CancelJob", - full_name="google.cloud.dataproc.v1.JobController.CancelJob", - index=4, - containing_service=None, - input_type=_CANCELJOBREQUEST, - output_type=_JOB, - serialized_options=_b( - '\202\323\344\223\002D"?/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\001*\332A\030project_id,region,job_id' - ), - ), - _descriptor.MethodDescriptor( - name="DeleteJob", - full_name="google.cloud.dataproc.v1.JobController.DeleteJob", - index=5, - containing_service=None, - input_type=_DELETEJOBREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002:*8/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\332A\030project_id,region,job_id" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_JOBCONTROLLER) - -DESCRIPTOR.services_by_name["JobController"] = _JOBCONTROLLER - -# @@protoc_insertion_point(module_scope) diff --git a/dataproc/google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py deleted file mode 100644 index d2706382307b..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py +++ /dev/null @@ -1,140 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.dataproc_v1.proto import ( - jobs_pb2 as google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class JobControllerStub(object): - """The JobController provides methods to manage jobs. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.SubmitJob = channel.unary_unary( - "/google.cloud.dataproc.v1.JobController/SubmitJob", - request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.SubmitJobRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.Job.FromString, - ) - self.GetJob = channel.unary_unary( - "/google.cloud.dataproc.v1.JobController/GetJob", - request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.GetJobRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.Job.FromString, - ) - self.ListJobs = channel.unary_unary( - "/google.cloud.dataproc.v1.JobController/ListJobs", - request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.ListJobsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.ListJobsResponse.FromString, - ) - self.UpdateJob = channel.unary_unary( - "/google.cloud.dataproc.v1.JobController/UpdateJob", - request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.UpdateJobRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.Job.FromString, - ) - self.CancelJob = channel.unary_unary( - "/google.cloud.dataproc.v1.JobController/CancelJob", - request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.CancelJobRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.Job.FromString, - ) - self.DeleteJob = channel.unary_unary( - "/google.cloud.dataproc.v1.JobController/DeleteJob", - request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.DeleteJobRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - -class JobControllerServicer(object): - """The JobController provides methods to manage jobs. - """ - - def SubmitJob(self, request, context): - """Submits a job to a cluster. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetJob(self, request, context): - """Gets the resource representation for a job in a project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListJobs(self, request, context): - """Lists regions/{region}/jobs in a project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateJob(self, request, context): - """Updates a job in a project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CancelJob(self, request, context): - """Starts a job cancellation request. To access the job resource - after cancellation, call - [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) - or - [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1/projects.regions.jobs/get). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteJob(self, request, context): - """Deletes the job from the project. 
If the job is active, the delete fails, - and the response returns `FAILED_PRECONDITION`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_JobControllerServicer_to_server(servicer, server): - rpc_method_handlers = { - "SubmitJob": grpc.unary_unary_rpc_method_handler( - servicer.SubmitJob, - request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.SubmitJobRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.Job.SerializeToString, - ), - "GetJob": grpc.unary_unary_rpc_method_handler( - servicer.GetJob, - request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.GetJobRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.Job.SerializeToString, - ), - "ListJobs": grpc.unary_unary_rpc_method_handler( - servicer.ListJobs, - request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.ListJobsRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.ListJobsResponse.SerializeToString, - ), - "UpdateJob": grpc.unary_unary_rpc_method_handler( - servicer.UpdateJob, - request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.UpdateJobRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.Job.SerializeToString, - ), - "CancelJob": grpc.unary_unary_rpc_method_handler( - servicer.CancelJob, - request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.CancelJobRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.Job.SerializeToString, - ), - "DeleteJob": grpc.unary_unary_rpc_method_handler( - servicer.DeleteJob, - request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.DeleteJobRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.dataproc.v1.JobController", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/dataproc/google/cloud/dataproc_v1/proto/operations.proto b/dataproc/google/cloud/dataproc_v1/proto/operations.proto deleted file mode 100644 index 4af2a5f80795..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/operations.proto +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
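The stub and servicer classes in the removed jobs_pb2_grpc.py are ordinary grpc-python plumbing, which makes them convenient for local fakes in tests. The sketch below is illustrative only and assumes the pre-split package; the FakeJobController behaviour, the port, and the field values are made up for the example.

from concurrent import futures

import grpc

from google.cloud.dataproc_v1.proto import jobs_pb2, jobs_pb2_grpc


class FakeJobController(jobs_pb2_grpc.JobControllerServicer):
    """Overrides GetJob only; every other RPC keeps the UNIMPLEMENTED default."""

    def GetJob(self, request, context):
        return jobs_pb2.Job(job_uuid="00000000-0000-0000-0000-000000000000")


server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
jobs_pb2_grpc.add_JobControllerServicer_to_server(FakeJobController(), server)
server.add_insecure_port("[::]:50051")
server.start()

# The client side mirrors JobControllerStub.__init__ above: each RPC is a
# unary-unary callable bound to the channel.
channel = grpc.insecure_channel("localhost:50051")
stub = jobs_pb2_grpc.JobControllerStub(channel)
job = stub.GetJob(jobs_pb2.GetJobRequest(project_id="p", region="r", job_id="j"))
server.stop(None)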
-// - -syntax = "proto3"; - -package google.cloud.dataproc.v1; - -import "google/api/field_behavior.proto"; -import "google/protobuf/timestamp.proto"; -import "google/api/annotations.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "OperationsProto"; -option java_package = "com.google.cloud.dataproc.v1"; - -// The status of the operation. -message ClusterOperationStatus { - // The operation state. - enum State { - // Unused. - UNKNOWN = 0; - - // The operation has been created. - PENDING = 1; - - // The operation is running. - RUNNING = 2; - - // The operation is done; either cancelled or completed. - DONE = 3; - } - - // Output only. A message containing the operation state. - State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. A message containing the detailed operation state. - string inner_state = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. A message containing any operation metadata details. - string details = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The time this state was entered. - google.protobuf.Timestamp state_start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Metadata describing the operation. -message ClusterOperationMetadata { - // Output only. Name of the cluster for the operation. - string cluster_name = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Cluster UUID for the operation. - string cluster_uuid = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Current operation status. - ClusterOperationStatus status = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The previous operation status. - repeated ClusterOperationStatus status_history = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The operation type. - string operation_type = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Short description of operation. - string description = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Labels associated with the operation - map labels = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Errors encountered during operation execution. - repeated string warnings = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; -} diff --git a/dataproc/google/cloud/dataproc_v1/proto/operations_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/operations_pb2.py deleted file mode 100644 index f7fadd195d52..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/operations_pb2.py +++ /dev/null @@ -1,485 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/dataproc_v1/proto/operations.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/dataproc_v1/proto/operations.proto", - package="google.cloud.dataproc.v1", - syntax="proto3", - serialized_options=_b( - "\n\034com.google.cloud.dataproc.v1B\017OperationsProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" - ), - serialized_pb=_b( - '\n/google/cloud/dataproc_v1/proto/operations.proto\x12\x18google.cloud.dataproc.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\x89\x02\n\x16\x43lusterOperationStatus\x12J\n\x05state\x18\x01 \x01(\x0e\x32\x36.google.cloud.dataproc.v1.ClusterOperationStatus.StateB\x03\xe0\x41\x03\x12\x18\n\x0binner_state\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07\x64\x65tails\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x10state_start_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"\xb8\x03\n\x18\x43lusterOperationMetadata\x12\x19\n\x0c\x63luster_name\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x08 \x01(\tB\x03\xe0\x41\x03\x12\x45\n\x06status\x18\t \x01(\x0b\x32\x30.google.cloud.dataproc.v1.ClusterOperationStatusB\x03\xe0\x41\x03\x12M\n\x0estatus_history\x18\n \x03(\x0b\x32\x30.google.cloud.dataproc.v1.ClusterOperationStatusB\x03\xe0\x41\x03\x12\x1b\n\x0eoperation_type\x18\x0b \x01(\tB\x03\xe0\x41\x03\x12\x18\n\x0b\x64\x65scription\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12S\n\x06labels\x18\r \x03(\x0b\x32>.google.cloud.dataproc.v1.ClusterOperationMetadata.LabelsEntryB\x03\xe0\x41\x03\x12\x15\n\x08warnings\x18\x0e \x03(\tB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42s\n\x1c\x63om.google.cloud.dataproc.v1B\x0fOperationsProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' - ), - dependencies=[ - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_CLUSTEROPERATIONSTATUS_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.cloud.dataproc.v1.ClusterOperationStatus.State", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="UNKNOWN", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PENDING", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DONE", index=3, number=3, serialized_options=None, type=None - ), - ], - containing_type=None, - 
serialized_options=None, - serialized_start=383, - serialized_end=439, -) -_sym_db.RegisterEnumDescriptor(_CLUSTEROPERATIONSTATUS_STATE) - - -_CLUSTEROPERATIONSTATUS = _descriptor.Descriptor( - name="ClusterOperationStatus", - full_name="google.cloud.dataproc.v1.ClusterOperationStatus", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.dataproc.v1.ClusterOperationStatus.state", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="inner_state", - full_name="google.cloud.dataproc.v1.ClusterOperationStatus.inner_state", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="details", - full_name="google.cloud.dataproc.v1.ClusterOperationStatus.details", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="state_start_time", - full_name="google.cloud.dataproc.v1.ClusterOperationStatus.state_start_time", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_CLUSTEROPERATIONSTATUS_STATE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=174, - serialized_end=439, -) - - -_CLUSTEROPERATIONMETADATA_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.cloud.dataproc.v1.ClusterOperationMetadata.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1.ClusterOperationMetadata.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1.ClusterOperationMetadata.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=837, - serialized_end=882, -) - 
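For reference, the messages generated from operations.proto serve as the metadata attached to long-running cluster operations. A minimal sketch of reading them, assuming the pre-split package is installed; all of these fields are output-only and normally populated by the service, so the values here are placeholders.

from google.cloud.dataproc_v1.proto import operations_pb2

metadata = operations_pb2.ClusterOperationMetadata(
    cluster_name="my-cluster",
    operation_type="CREATE",
    status=operations_pb2.ClusterOperationStatus(
        state=operations_pb2.ClusterOperationStatus.RUNNING,
    ),
)

# State follows the lifecycle declared in the enum above:
# UNKNOWN -> PENDING -> RUNNING -> DONE.
if metadata.status.state == operations_pb2.ClusterOperationStatus.DONE:
    print("operation finished:", metadata.description)
else:
    print("state:", operations_pb2.ClusterOperationStatus.State.Name(metadata.status.state))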
-_CLUSTEROPERATIONMETADATA = _descriptor.Descriptor( - name="ClusterOperationMetadata", - full_name="google.cloud.dataproc.v1.ClusterOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1.ClusterOperationMetadata.cluster_name", - index=0, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_uuid", - full_name="google.cloud.dataproc.v1.ClusterOperationMetadata.cluster_uuid", - index=1, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status", - full_name="google.cloud.dataproc.v1.ClusterOperationMetadata.status", - index=2, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status_history", - full_name="google.cloud.dataproc.v1.ClusterOperationMetadata.status_history", - index=3, - number=10, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="operation_type", - full_name="google.cloud.dataproc.v1.ClusterOperationMetadata.operation_type", - index=4, - number=11, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.dataproc.v1.ClusterOperationMetadata.description", - index=5, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.cloud.dataproc.v1.ClusterOperationMetadata.labels", - index=6, - number=13, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="warnings", - full_name="google.cloud.dataproc.v1.ClusterOperationMetadata.warnings", - index=7, - number=14, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), 
- ], - extensions=[], - nested_types=[_CLUSTEROPERATIONMETADATA_LABELSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=442, - serialized_end=882, -) - -_CLUSTEROPERATIONSTATUS.fields_by_name[ - "state" -].enum_type = _CLUSTEROPERATIONSTATUS_STATE -_CLUSTEROPERATIONSTATUS.fields_by_name[ - "state_start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_CLUSTEROPERATIONSTATUS_STATE.containing_type = _CLUSTEROPERATIONSTATUS -_CLUSTEROPERATIONMETADATA_LABELSENTRY.containing_type = _CLUSTEROPERATIONMETADATA -_CLUSTEROPERATIONMETADATA.fields_by_name[ - "status" -].message_type = _CLUSTEROPERATIONSTATUS -_CLUSTEROPERATIONMETADATA.fields_by_name[ - "status_history" -].message_type = _CLUSTEROPERATIONSTATUS -_CLUSTEROPERATIONMETADATA.fields_by_name[ - "labels" -].message_type = _CLUSTEROPERATIONMETADATA_LABELSENTRY -DESCRIPTOR.message_types_by_name["ClusterOperationStatus"] = _CLUSTEROPERATIONSTATUS -DESCRIPTOR.message_types_by_name["ClusterOperationMetadata"] = _CLUSTEROPERATIONMETADATA -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ClusterOperationStatus = _reflection.GeneratedProtocolMessageType( - "ClusterOperationStatus", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTEROPERATIONSTATUS, - __module__="google.cloud.dataproc_v1.proto.operations_pb2", - __doc__="""The status of the operation. - - - Attributes: - state: - Output only. A message containing the operation state. - inner_state: - Output only. A message containing the detailed operation - state. - details: - Output only. A message containing any operation metadata - details. - state_start_time: - Output only. The time this state was entered. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterOperationStatus) - ), -) -_sym_db.RegisterMessage(ClusterOperationStatus) - -ClusterOperationMetadata = _reflection.GeneratedProtocolMessageType( - "ClusterOperationMetadata", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTEROPERATIONMETADATA_LABELSENTRY, - __module__="google.cloud.dataproc_v1.proto.operations_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterOperationMetadata.LabelsEntry) - ), - ), - DESCRIPTOR=_CLUSTEROPERATIONMETADATA, - __module__="google.cloud.dataproc_v1.proto.operations_pb2", - __doc__="""Metadata describing the operation. - - - Attributes: - cluster_name: - Output only. Name of the cluster for the operation. - cluster_uuid: - Output only. Cluster UUID for the operation. - status: - Output only. Current operation status. - status_history: - Output only. The previous operation status. - operation_type: - Output only. The operation type. - description: - Output only. Short description of operation. - labels: - Output only. Labels associated with the operation - warnings: - Output only. Errors encountered during operation execution. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterOperationMetadata) - ), -) -_sym_db.RegisterMessage(ClusterOperationMetadata) -_sym_db.RegisterMessage(ClusterOperationMetadata.LabelsEntry) - - -DESCRIPTOR._options = None -_CLUSTEROPERATIONSTATUS.fields_by_name["state"]._options = None -_CLUSTEROPERATIONSTATUS.fields_by_name["inner_state"]._options = None -_CLUSTEROPERATIONSTATUS.fields_by_name["details"]._options = None -_CLUSTEROPERATIONSTATUS.fields_by_name["state_start_time"]._options = None -_CLUSTEROPERATIONMETADATA_LABELSENTRY._options = None -_CLUSTEROPERATIONMETADATA.fields_by_name["cluster_name"]._options = None -_CLUSTEROPERATIONMETADATA.fields_by_name["cluster_uuid"]._options = None -_CLUSTEROPERATIONMETADATA.fields_by_name["status"]._options = None -_CLUSTEROPERATIONMETADATA.fields_by_name["status_history"]._options = None -_CLUSTEROPERATIONMETADATA.fields_by_name["operation_type"]._options = None -_CLUSTEROPERATIONMETADATA.fields_by_name["description"]._options = None -_CLUSTEROPERATIONMETADATA.fields_by_name["labels"]._options = None -_CLUSTEROPERATIONMETADATA.fields_by_name["warnings"]._options = None -# @@protoc_insertion_point(module_scope) diff --git a/dataproc/google/cloud/dataproc_v1/proto/operations_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1/proto/operations_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/operations_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/dataproc/google/cloud/dataproc_v1/proto/shared.proto b/dataproc/google/cloud/dataproc_v1/proto/shared.proto deleted file mode 100644 index 74bd56a80875..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/shared.proto +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.dataproc.v1; - -import "google/api/annotations.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "SharedProto"; -option java_package = "com.google.cloud.dataproc.v1"; - -// Cluster components that can be activated. -enum Component { - // Unspecified component. - COMPONENT_UNSPECIFIED = 0; - - // The Anaconda python distribution. - ANACONDA = 5; - - // The Hive Web HCatalog (the REST service for accessing HCatalog). - HIVE_WEBHCAT = 3; - - // The Jupyter Notebook. - JUPYTER = 1; - - // The Zeppelin notebook. - ZEPPELIN = 4; -} diff --git a/dataproc/google/cloud/dataproc_v1/proto/shared_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/shared_pb2.py deleted file mode 100644 index 748c8e01d2e9..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/shared_pb2.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/dataproc_v1/proto/shared.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/dataproc_v1/proto/shared.proto", - package="google.cloud.dataproc.v1", - syntax="proto3", - serialized_options=_b( - "\n\034com.google.cloud.dataproc.v1B\013SharedProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" - ), - serialized_pb=_b( - "\n+google/cloud/dataproc_v1/proto/shared.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto*a\n\tComponent\x12\x19\n\x15\x43OMPONENT_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x41NACONDA\x10\x05\x12\x10\n\x0cHIVE_WEBHCAT\x10\x03\x12\x0b\n\x07JUPYTER\x10\x01\x12\x0c\n\x08ZEPPELIN\x10\x04\x42o\n\x1c\x63om.google.cloud.dataproc.v1B\x0bSharedProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3" - ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], -) - -_COMPONENT = _descriptor.EnumDescriptor( - name="Component", - full_name="google.cloud.dataproc.v1.Component", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="COMPONENT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ANACONDA", index=1, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="HIVE_WEBHCAT", index=2, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="JUPYTER", index=3, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ZEPPELIN", index=4, number=4, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=103, - serialized_end=200, -) -_sym_db.RegisterEnumDescriptor(_COMPONENT) - -Component = enum_type_wrapper.EnumTypeWrapper(_COMPONENT) -COMPONENT_UNSPECIFIED = 0 -ANACONDA = 5 -HIVE_WEBHCAT = 3 -JUPYTER = 1 -ZEPPELIN = 4 - - -DESCRIPTOR.enum_types_by_name["Component"] = _COMPONENT -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/dataproc/google/cloud/dataproc_v1/proto/shared_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1/proto/shared_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/shared_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/dataproc/google/cloud/dataproc_v1/proto/workflow_templates.proto b/dataproc/google/cloud/dataproc_v1/proto/workflow_templates.proto deleted file mode 100644 index 30b5ced47867..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/workflow_templates.proto +++ /dev/null @@ -1,779 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.dataproc.v1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/dataproc/v1/clusters.proto"; -import "google/cloud/dataproc/v1/jobs.proto"; -import "google/longrunning/operations.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "WorkflowTemplatesProto"; -option java_package = "com.google.cloud.dataproc.v1"; - -// The API interface for managing Workflow Templates in the -// Dataproc API. -service WorkflowTemplateService { - option (google.api.default_host) = "dataproc.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates new workflow template. - rpc CreateWorkflowTemplate(CreateWorkflowTemplateRequest) returns (WorkflowTemplate) { - option (google.api.http) = { - post: "/v1/{parent=projects/*/locations/*}/workflowTemplates" - body: "template" - additional_bindings { - post: "/v1/{parent=projects/*/regions/*}/workflowTemplates" - body: "template" - } - }; - option (google.api.method_signature) = "parent,template"; - } - - // Retrieves the latest workflow template. - // - // Can retrieve previously instantiated template by specifying optional - // version parameter. - rpc GetWorkflowTemplate(GetWorkflowTemplateRequest) returns (WorkflowTemplate) { - option (google.api.http) = { - get: "/v1/{name=projects/*/locations/*/workflowTemplates/*}" - additional_bindings { - get: "/v1/{name=projects/*/regions/*/workflowTemplates/*}" - } - }; - option (google.api.method_signature) = "name"; - } - - // Instantiates a template and begins execution. - // - // The returned Operation can be used to track execution of - // workflow by polling - // [operations.get][google.longrunning.Operations.GetOperation]. - // The Operation will complete when entire workflow is finished. - // - // The running workflow can be aborted via - // [operations.cancel][google.longrunning.Operations.CancelOperation]. - // This will cause any inflight jobs to be cancelled and workflow-owned - // clusters to be deleted. - // - // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). - // Also see [Using - // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). - // - // On successful completion, - // [Operation.response][google.longrunning.Operation.response] will be - // [Empty][google.protobuf.Empty]. 
- rpc InstantiateWorkflowTemplate(InstantiateWorkflowTemplateRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate" - body: "*" - additional_bindings { - post: "/v1/{name=projects/*/regions/*/workflowTemplates/*}:instantiate" - body: "*" - } - }; - option (google.api.method_signature) = "name"; - option (google.api.method_signature) = "name,parameters"; - option (google.longrunning.operation_info) = { - response_type: "google.protobuf.Empty" - metadata_type: "WorkflowMetadata" - }; - } - - // Instantiates a template and begins execution. - // - // This method is equivalent to executing the sequence - // [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], - // [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. - // - // The returned Operation can be used to track execution of - // workflow by polling - // [operations.get][google.longrunning.Operations.GetOperation]. - // The Operation will complete when entire workflow is finished. - // - // The running workflow can be aborted via - // [operations.cancel][google.longrunning.Operations.CancelOperation]. - // This will cause any inflight jobs to be cancelled and workflow-owned - // clusters to be deleted. - // - // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). - // Also see [Using - // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). - // - // On successful completion, - // [Operation.response][google.longrunning.Operation.response] will be - // [Empty][google.protobuf.Empty]. - rpc InstantiateInlineWorkflowTemplate(InstantiateInlineWorkflowTemplateRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline" - body: "template" - additional_bindings { - post: "/v1/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline" - body: "template" - } - }; - option (google.api.method_signature) = "parent,template"; - option (google.longrunning.operation_info) = { - response_type: "google.protobuf.Empty" - metadata_type: "WorkflowMetadata" - }; - } - - // Updates (replaces) workflow template. The updated template - // must contain version that matches the current server version. - rpc UpdateWorkflowTemplate(UpdateWorkflowTemplateRequest) returns (WorkflowTemplate) { - option (google.api.http) = { - put: "/v1/{template.name=projects/*/locations/*/workflowTemplates/*}" - body: "template" - additional_bindings { - put: "/v1/{template.name=projects/*/regions/*/workflowTemplates/*}" - body: "template" - } - }; - option (google.api.method_signature) = "template"; - } - - // Lists workflows that match the specified filter in the request. - rpc ListWorkflowTemplates(ListWorkflowTemplatesRequest) returns (ListWorkflowTemplatesResponse) { - option (google.api.http) = { - get: "/v1/{parent=projects/*/locations/*}/workflowTemplates" - additional_bindings { - get: "/v1/{parent=projects/*/regions/*}/workflowTemplates" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a workflow template. It does not cancel in-progress workflows. 
- rpc DeleteWorkflowTemplate(DeleteWorkflowTemplateRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/{name=projects/*/locations/*/workflowTemplates/*}" - additional_bindings { - delete: "/v1/{name=projects/*/regions/*/workflowTemplates/*}" - } - }; - option (google.api.method_signature) = "name"; - } -} - -// A Dataproc workflow template resource. -message WorkflowTemplate { - option (google.api.resource) = { - type: "dataproc.googleapis.com/WorkflowTemplate" - pattern: "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}" - pattern: "projects/{project}/locations/{location}/workflowTemplates/{workflow_template}" - history: ORIGINALLY_SINGLE_PATTERN - }; - - string id = 2 [(google.api.field_behavior) = REQUIRED]; - - // Output only. The resource name of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.workflowTemplates`, the resource name of the - // template has the following format: - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - // - // * For `projects.locations.workflowTemplates`, the resource name of the - // template has the following format: - // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. Used to perform a consistent read-modify-write. - // - // This field should be left blank for a `CreateWorkflowTemplate` request. It - // is required for an `UpdateWorkflowTemplate` request, and must match the - // current server version. A typical update template flow would fetch the - // current template with a `GetWorkflowTemplate` request, which will return - // the current template with the `version` field filled in with the - // current server version. The user updates other fields in the template, - // then returns it as part of the `UpdateWorkflowTemplate` request. - int32 version = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. The time template was created. - google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The time template was last updated. - google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. The labels to associate with this template. These labels - // will be propagated to all jobs and clusters created by the workflow - // instance. - // - // Label **keys** must contain 1 to 63 characters, and must conform to - // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). - // - // Label **values** may be empty, but, if present, must contain 1 to 63 - // characters, and must conform to - // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). - // - // No more than 32 labels can be associated with a template. - map labels = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Required. WorkflowTemplate scheduling information. - WorkflowTemplatePlacement placement = 7 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Directed Acyclic Graph of Jobs to submit. - repeated OrderedJob jobs = 8 [(google.api.field_behavior) = REQUIRED]; - - // Optional. emplate parameters whose values are substituted into the - // template. Values for parameters must be provided when the template is - // instantiated. - repeated TemplateParameter parameters = 9 [(google.api.field_behavior) = OPTIONAL]; -} - -// Specifies workflow execution target. 
-//
-// Either `managed_cluster` or `cluster_selector` is required.
-message WorkflowTemplatePlacement {
-  // Required. Specifies where workflow executes; either on a managed
-  // cluster or an existing cluster chosen by labels.
-  oneof placement {
-    // A cluster that is managed by the workflow.
-    ManagedCluster managed_cluster = 1;
-
-    // Optional. A selector that chooses target cluster for jobs based
-    // on metadata.
-    //
-    // The selector is evaluated at the time each job is submitted.
-    ClusterSelector cluster_selector = 2;
-  }
-}
-
-// Cluster that is managed by the workflow.
-message ManagedCluster {
-  // Required. The cluster name prefix. A unique cluster name will be formed by
-  // appending a random suffix.
-  //
-  // The name must contain only lower-case letters (a-z), numbers (0-9),
-  // and hyphens (-). Must begin with a letter. Cannot begin or end with
-  // hyphen. Must consist of between 2 and 35 characters.
-  string cluster_name = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The cluster configuration.
-  ClusterConfig config = 3 [(google.api.field_behavior) = REQUIRED];
-
-  // Optional. The labels to associate with this cluster.
-  //
-  // Label keys must be between 1 and 63 characters long, and must conform to
-  // the following PCRE regular expression:
-  // [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
-  //
-  // Label values must be between 1 and 63 characters long, and must conform to
-  // the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
-  //
-  // No more than 32 labels can be associated with a given cluster.
-  map<string, string> labels = 4 [(google.api.field_behavior) = OPTIONAL];
-}
-
-// A selector that chooses target cluster for jobs based on metadata.
-message ClusterSelector {
-  // Optional. The zone where workflow process executes. This parameter does not
-  // affect the selection of the cluster.
-  //
-  // If unspecified, the zone of the first cluster matching the selector
-  // is used.
-  string zone = 1 [(google.api.field_behavior) = OPTIONAL];
-
-  // Required. The cluster labels. Cluster must have all labels
-  // to match.
-  map<string, string> cluster_labels = 2 [(google.api.field_behavior) = REQUIRED];
-}
-
-// A job executed by the workflow.
-message OrderedJob {
-  // Required. The step id. The id must be unique among all jobs
-  // within the template.
-  //
-  // The step id is used as prefix for job id, as job
-  // `goog-dataproc-workflow-step-id` label, and in
-  // [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] field from other
-  // steps.
-  //
-  // The id must contain only letters (a-z, A-Z), numbers (0-9),
-  // underscores (_), and hyphens (-). Cannot begin or end with underscore
-  // or hyphen. Must consist of between 3 and 50 characters.
-  string step_id = 1 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The job definition.
-  oneof job_type {
-    // Job is a Hadoop job.
-    HadoopJob hadoop_job = 2;
-
-    // Job is a Spark job.
-    SparkJob spark_job = 3;
-
-    // Job is a Pyspark job.
-    PySparkJob pyspark_job = 4;
-
-    // Job is a Hive job.
-    HiveJob hive_job = 5;
-
-    // Job is a Pig job.
-    PigJob pig_job = 6;
-
-    // Job is a SparkSql job.
-    SparkSqlJob spark_sql_job = 7;
-  }
-
-  // Optional. The labels to associate with this job.
-  //
-  // Label keys must be between 1 and 63 characters long, and must conform to
-  // the following regular expression:
-  // [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
-  //
-  // Label values must be between 1 and 63 characters long, and must conform to
-  // the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
-  //
-  // No more than 32 labels can be associated with a given job.
-  map<string, string> labels = 8 [(google.api.field_behavior) = OPTIONAL];
-
-  // Optional. Job scheduling configuration.
-  JobScheduling scheduling = 9 [(google.api.field_behavior) = OPTIONAL];
-
-  // Optional. The optional list of prerequisite job step_ids.
-  // If not specified, the job will start at the beginning of workflow.
-  repeated string prerequisite_step_ids = 10 [(google.api.field_behavior) = OPTIONAL];
-}
-
-// A configurable parameter that replaces one or more fields in the template.
-// Parameterizable fields:
-// - Labels
-// - File uris
-// - Job properties
-// - Job arguments
-// - Script variables
-// - Main class (in HadoopJob and SparkJob)
-// - Zone (in ClusterSelector)
-message TemplateParameter {
-  // Required. Parameter name.
-  // The parameter name is used as the key, and paired with the
-  // parameter value, which are passed to the template when the template
-  // is instantiated.
-  // The name must contain only capital letters (A-Z), numbers (0-9), and
-  // underscores (_), and must not start with a number. The maximum length is
-  // 40 characters.
-  string name = 1 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. Paths to all fields that the parameter replaces.
-  // A field is allowed to appear in at most one parameter's list of field
-  // paths.
-  //
-  // A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
-  // For example, a field path that references the zone field of a workflow
-  // template's cluster selector would be specified as
-  // `placement.clusterSelector.zone`.
-  //
-  // Also, field paths can reference fields using the following syntax:
-  //
-  // * Values in maps can be referenced by key:
-  //   * labels['key']
-  //   * placement.clusterSelector.clusterLabels['key']
-  //   * placement.managedCluster.labels['key']
-  //   * placement.clusterSelector.clusterLabels['key']
-  //   * jobs['step-id'].labels['key']
-  //
-  // * Jobs in the jobs list can be referenced by step-id:
-  //   * jobs['step-id'].hadoopJob.mainJarFileUri
-  //   * jobs['step-id'].hiveJob.queryFileUri
-  //   * jobs['step-id'].pySparkJob.mainPythonFileUri
-  //   * jobs['step-id'].hadoopJob.jarFileUris[0]
-  //   * jobs['step-id'].hadoopJob.archiveUris[0]
-  //   * jobs['step-id'].hadoopJob.fileUris[0]
-  //   * jobs['step-id'].pySparkJob.pythonFileUris[0]
-  //
-  // * Items in repeated fields can be referenced by a zero-based index:
-  //   * jobs['step-id'].sparkJob.args[0]
-  //
-  // * Other examples:
-  //   * jobs['step-id'].hadoopJob.properties['key']
-  //   * jobs['step-id'].hadoopJob.args[0]
-  //   * jobs['step-id'].hiveJob.scriptVariables['key']
-  //   * jobs['step-id'].hadoopJob.mainJarFileUri
-  //   * placement.clusterSelector.zone
-  //
-  // It may not be possible to parameterize maps and repeated fields in their
-  // entirety since only individual map values and individual items in repeated
-  // fields can be referenced. For example, the following field paths are
-  // invalid:
-  //
-  // - placement.clusterSelector.clusterLabels
-  // - jobs['step-id'].sparkJob.args
-  repeated string fields = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // Optional. Brief description of the parameter.
-  // Must not exceed 1024 characters.
-  string description = 3 [(google.api.field_behavior) = OPTIONAL];
-
-  // Optional. Validation rules to be applied to this parameter's value.
-  ParameterValidation validation = 4 [(google.api.field_behavior) = OPTIONAL];
-}
-
-// Configuration for parameter validation.
-message ParameterValidation {
-  // Required. The type of validation to be performed.
-  oneof validation_type {
-    // Validation based on regular expressions.
-    RegexValidation regex = 1;
-
-    // Validation based on a list of allowed values.
-    ValueValidation values = 2;
-  }
-}
-
-// Validation based on regular expressions.
-message RegexValidation {
-  // Required. RE2 regular expressions used to validate the parameter's value.
-  // The value must match the regex in its entirety (substring
-  // matches are not sufficient).
-  repeated string regexes = 1 [(google.api.field_behavior) = REQUIRED];
-}
-
-// Validation based on a list of allowed values.
-message ValueValidation {
-  // Required. List of allowed values for the parameter.
-  repeated string values = 1 [(google.api.field_behavior) = REQUIRED];
-}
-
-// A Dataproc workflow template resource.
-message WorkflowMetadata {
-  // The operation state.
-  enum State {
-    // Unused.
-    UNKNOWN = 0;
-
-    // The operation has been created.
-    PENDING = 1;
-
-    // The operation is running.
-    RUNNING = 2;
-
-    // The operation is done; either cancelled or completed.
-    DONE = 3;
-  }
-
-  // Output only. The resource name of the workflow template as described
-  // in https://cloud.google.com/apis/design/resource_names.
-  //
-  // * For `projects.regions.workflowTemplates`, the resource name of the
-  //   template has the following format:
-  //   `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
-  //
-  // * For `projects.locations.workflowTemplates`, the resource name of the
-  //   template has the following format:
-  //   `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
-  string template = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. The version of template at the time of
-  // workflow instantiation.
-  int32 version = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. The create cluster operation metadata.
-  ClusterOperation create_cluster = 3 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. The workflow graph.
-  WorkflowGraph graph = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. The delete cluster operation metadata.
-  ClusterOperation delete_cluster = 5 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. The workflow state.
-  State state = 6 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. The name of the target cluster.
-  string cluster_name = 7 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Map from parameter names to values that were used for those parameters.
-  map<string, string> parameters = 8;
-
-  // Output only. Workflow start time.
-  google.protobuf.Timestamp start_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. Workflow end time.
-  google.protobuf.Timestamp end_time = 10 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. The UUID of target cluster.
-  string cluster_uuid = 11 [(google.api.field_behavior) = OUTPUT_ONLY];
-}
-
-// The cluster operation triggered by a workflow.
-message ClusterOperation {
-  // Output only. The id of the cluster operation.
-  string operation_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. Error, if operation failed.
- string error = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Indicates the operation is done. - bool done = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// The workflow graph. -message WorkflowGraph { - // Output only. The workflow nodes. - repeated WorkflowNode nodes = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// The workflow node. -message WorkflowNode { - // The workflow node state. - enum NodeState { - // State is unspecified. - NODE_STATE_UNSPECIFIED = 0; - - // The node is awaiting prerequisite node to finish. - BLOCKED = 1; - - // The node is runnable but not running. - RUNNABLE = 2; - - // The node is running. - RUNNING = 3; - - // The node completed successfully. - COMPLETED = 4; - - // The node failed. A node can be marked FAILED because - // its ancestor or peer failed. - FAILED = 5; - } - - // Output only. The name of the node. - string step_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Node's prerequisite nodes. - repeated string prerequisite_step_ids = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The job id; populated after the node enters RUNNING state. - string job_id = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The node state. - NodeState state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The error detail. - string error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A request to create a workflow template. -message CreateWorkflowTemplateRequest { - // Required. The resource name of the region or location, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.workflowTemplates,create`, the resource name of the - // region has the following format: - // `projects/{project_id}/regions/{region}` - // - // * For `projects.locations.workflowTemplates.create`, the resource name of - // the location has the following format: - // `projects/{project_id}/locations/{location}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Required. The Dataproc workflow template to create. - WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to fetch a workflow template. -message GetWorkflowTemplateRequest { - // Required. The resource name of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.workflowTemplates.get`, the resource name of the - // template has the following format: - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - // - // * For `projects.locations.workflowTemplates.get`, the resource name of the - // template has the following format: - // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Optional. The version of workflow template to retrieve. Only previously - // instantiated versions can be retrieved. - // - // If unspecified, retrieves the current version. - int32 version = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to instantiate a workflow template. -message InstantiateWorkflowTemplateRequest { - // Required. 
The resource name of the workflow template, as described
-  // in https://cloud.google.com/apis/design/resource_names.
-  //
-  // * For `projects.regions.workflowTemplates.instantiate`, the resource name
-  //   of the template has the following format:
-  //   `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
-  //
-  // * For `projects.locations.workflowTemplates.instantiate`, the resource name
-  //   of the template has the following format:
-  //   `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
-  string name = 1 [
-    (google.api.field_behavior) = REQUIRED,
-    (google.api.resource_reference) = {
-      type: "dataproc.googleapis.com/WorkflowTemplate"
-    }
-  ];
-
-  // Optional. The version of workflow template to instantiate. If specified,
-  // the workflow will be instantiated only if the current version of
-  // the workflow template has the supplied version.
-  //
-  // This option cannot be used to instantiate a previous version of
-  // workflow template.
-  int32 version = 2 [(google.api.field_behavior) = OPTIONAL];
-
-  // Optional. A tag that prevents multiple concurrent workflow
-  // instances with the same tag from running. This mitigates risk of
-  // concurrent instances started due to retries.
-  //
-  // It is recommended to always set this value to a
-  // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
-  //
-  // The tag must contain only letters (a-z, A-Z), numbers (0-9),
-  // underscores (_), and hyphens (-). The maximum length is 40 characters.
-  string request_id = 5 [(google.api.field_behavior) = OPTIONAL];
-
-  // Optional. Map from parameter names to values that should be used for those
-  // parameters. Values may not exceed 100 characters.
-  map<string, string> parameters = 6 [(google.api.field_behavior) = OPTIONAL];
-}
-
-// A request to instantiate an inline workflow template.
-message InstantiateInlineWorkflowTemplateRequest {
-  // Required. The resource name of the region or location, as described
-  // in https://cloud.google.com/apis/design/resource_names.
-  //
-  // * For `projects.regions.workflowTemplates,instantiateinline`, the resource
-  //   name of the region has the following format:
-  //   `projects/{project_id}/regions/{region}`
-  //
-  // * For `projects.locations.workflowTemplates.instantiateinline`, the
-  //   resource name of the location has the following format:
-  //   `projects/{project_id}/locations/{location}`
-  string parent = 1 [
-    (google.api.field_behavior) = REQUIRED,
-    (google.api.resource_reference) = {
-      child_type: "dataproc.googleapis.com/WorkflowTemplate"
-    }
-  ];
-
-  // Required. The workflow template to instantiate.
-  WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // Optional. A tag that prevents multiple concurrent workflow
-  // instances with the same tag from running. This mitigates risk of
-  // concurrent instances started due to retries.
-  //
-  // It is recommended to always set this value to a
-  // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
-  //
-  // The tag must contain only letters (a-z, A-Z), numbers (0-9),
-  // underscores (_), and hyphens (-). The maximum length is 40 characters.
-  string request_id = 3 [(google.api.field_behavior) = OPTIONAL];
-}
-
-// A request to update a workflow template.
-message UpdateWorkflowTemplateRequest {
-  // Required. The updated workflow template.
-  //
-  // The `template.version` field must match the current version.
- WorkflowTemplate template = 1 [ - (google.api.field_behavior) = REQUIRED - ]; -} - -// A request to list workflow templates in a project. -message ListWorkflowTemplatesRequest { - // Required. The resource name of the region or location, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.workflowTemplates,list`, the resource - // name of the region has the following format: - // `projects/{project_id}/regions/{region}` - // - // * For `projects.locations.workflowTemplates.list`, the - // resource name of the location has the following format: - // `projects/{project_id}/locations/{location}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Optional. The maximum number of results to return in each response. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The page token, returned by a previous call, to request the - // next page of results. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// A response to a request to list workflow templates in a project. -message ListWorkflowTemplatesResponse { - // Output only. WorkflowTemplates list. - repeated WorkflowTemplate templates = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. This token is included in the response if there are more - // results to fetch. To fetch additional results, provide this value as the - // page_token in a subsequent ListWorkflowTemplatesRequest. - string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A request to delete a workflow template. -// -// Currently started workflows will remain running. -message DeleteWorkflowTemplateRequest { - // Required. The resource name of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.workflowTemplates.delete`, the resource name - // of the template has the following format: - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - // - // * For `projects.locations.workflowTemplates.instantiate`, the resource name - // of the template has the following format: - // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Optional. The version of workflow template to delete. If specified, - // will only delete the template if the current server version matches - // specified version. - int32 version = 2 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/dataproc/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py deleted file mode 100644 index 0c3125b12d44..000000000000 --- a/dataproc/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py +++ /dev/null @@ -1,3354 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/dataproc_v1/proto/workflow_templates.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.dataproc_v1.proto import ( - clusters_pb2 as google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2, -) -from google.cloud.dataproc_v1.proto import ( - jobs_pb2 as google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/dataproc_v1/proto/workflow_templates.proto", - package="google.cloud.dataproc.v1", - syntax="proto3", - serialized_options=_b( - "\n\034com.google.cloud.dataproc.v1B\026WorkflowTemplatesProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" - ), - serialized_pb=_b( - '\n7google/cloud/dataproc_v1/proto/workflow_templates.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a-google/cloud/dataproc_v1/proto/clusters.proto\x1a)google/cloud/dataproc_v1/proto/jobs.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xcd\x05\n\x10WorkflowTemplate\x12\x0f\n\x02id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07version\x18\x03 \x01(\x05\x42\x03\xe0\x41\x01\x12\x34\n\x0b\x63reate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12K\n\x06labels\x18\x06 \x03(\x0b\x32\x36.google.cloud.dataproc.v1.WorkflowTemplate.LabelsEntryB\x03\xe0\x41\x01\x12K\n\tplacement\x18\x07 \x01(\x0b\x32\x33.google.cloud.dataproc.v1.WorkflowTemplatePlacementB\x03\xe0\x41\x02\x12\x37\n\x04jobs\x18\x08 \x03(\x0b\x32$.google.cloud.dataproc.v1.OrderedJobB\x03\xe0\x41\x02\x12\x44\n\nparameters\x18\t \x03(\x0b\x32+.google.cloud.dataproc.v1.TemplateParameterB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:\xca\x01\xea\x41\xc6\x01\n(dataproc.googleapis.com/WorkflowTemplate\x12Iprojects/{project}/regions/{region}/workflowTemplates/{workflow_template}\x12Mprojects/{project}/locations/{location}/workflowTemplates/{workflow_template} \x01"\xb4\x01\n\x19WorkflowTemplatePlacement\x12\x43\n\x0fmanaged_cluster\x18\x01 \x01(\x0b\x32(.google.cloud.dataproc.v1.ManagedClusterH\x00\x12\x45\n\x10\x63luster_selector\x18\x02 
\x01(\x0b\x32).google.cloud.dataproc.v1.ClusterSelectorH\x00\x42\x0b\n\tplacement"\xe3\x01\n\x0eManagedCluster\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12<\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfigB\x03\xe0\x41\x02\x12I\n\x06labels\x18\x04 \x03(\x0b\x32\x34.google.cloud.dataproc.v1.ManagedCluster.LabelsEntryB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb5\x01\n\x0f\x43lusterSelector\x12\x11\n\x04zone\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12Y\n\x0e\x63luster_labels\x18\x02 \x03(\x0b\x32<.google.cloud.dataproc.v1.ClusterSelector.ClusterLabelsEntryB\x03\xe0\x41\x02\x1a\x34\n\x12\x43lusterLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xe7\x04\n\nOrderedJob\x12\x14\n\x07step_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\nhadoop_job\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.HadoopJobH\x00\x12\x37\n\tspark_job\x18\x03 \x01(\x0b\x32".google.cloud.dataproc.v1.SparkJobH\x00\x12;\n\x0bpyspark_job\x18\x04 \x01(\x0b\x32$.google.cloud.dataproc.v1.PySparkJobH\x00\x12\x35\n\x08hive_job\x18\x05 \x01(\x0b\x32!.google.cloud.dataproc.v1.HiveJobH\x00\x12\x33\n\x07pig_job\x18\x06 \x01(\x0b\x32 .google.cloud.dataproc.v1.PigJobH\x00\x12>\n\rspark_sql_job\x18\x07 \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkSqlJobH\x00\x12\x45\n\x06labels\x18\x08 \x03(\x0b\x32\x30.google.cloud.dataproc.v1.OrderedJob.LabelsEntryB\x03\xe0\x41\x01\x12@\n\nscheduling\x18\t \x01(\x0b\x32\'.google.cloud.dataproc.v1.JobSchedulingB\x03\xe0\x41\x01\x12"\n\x15prerequisite_step_ids\x18\n \x03(\tB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08job_type"\x9d\x01\n\x11TemplateParameter\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ields\x18\x02 \x03(\tB\x03\xe0\x41\x02\x12\x18\n\x0b\x64\x65scription\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x46\n\nvalidation\x18\x04 \x01(\x0b\x32-.google.cloud.dataproc.v1.ParameterValidationB\x03\xe0\x41\x01"\xa1\x01\n\x13ParameterValidation\x12:\n\x05regex\x18\x01 \x01(\x0b\x32).google.cloud.dataproc.v1.RegexValidationH\x00\x12;\n\x06values\x18\x02 \x01(\x0b\x32).google.cloud.dataproc.v1.ValueValidationH\x00\x42\x11\n\x0fvalidation_type"\'\n\x0fRegexValidation\x12\x14\n\x07regexes\x18\x01 \x03(\tB\x03\xe0\x41\x02"&\n\x0fValueValidation\x12\x13\n\x06values\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xaf\x05\n\x10WorkflowMetadata\x12\x15\n\x08template\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x03\x12G\n\x0e\x63reate_cluster\x18\x03 \x01(\x0b\x32*.google.cloud.dataproc.v1.ClusterOperationB\x03\xe0\x41\x03\x12;\n\x05graph\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.WorkflowGraphB\x03\xe0\x41\x03\x12G\n\x0e\x64\x65lete_cluster\x18\x05 \x01(\x0b\x32*.google.cloud.dataproc.v1.ClusterOperationB\x03\xe0\x41\x03\x12\x44\n\x05state\x18\x06 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.WorkflowMetadata.StateB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_name\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12N\n\nparameters\x18\x08 \x03(\x0b\x32:.google.cloud.dataproc.v1.WorkflowMetadata.ParametersEntry\x12\x33\n\nstart_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x0b \x01(\tB\x03\xe0\x41\x03\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"T\n\x10\x43lusterOperation\x12\x19\n\x0coperation_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04\x64one\x18\x03 \x01(\x08\x42\x03\xe0\x41\x03"K\n\rWorkflowGraph\x12:\n\x05nodes\x18\x01 \x03(\x0b\x32&.google.cloud.dataproc.v1.WorkflowNodeB\x03\xe0\x41\x03"\xa3\x02\n\x0cWorkflowNode\x12\x14\n\x07step_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12"\n\x15prerequisite_step_ids\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12\x44\n\x05state\x18\x05 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.WorkflowNode.NodeStateB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x06 \x01(\tB\x03\xe0\x41\x03"j\n\tNodeState\x12\x1a\n\x16NODE_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x01\x12\x0c\n\x08RUNNABLE\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05"\xa4\x01\n\x1d\x43reateWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x41\n\x08template\x18\x02 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x02"r\n\x1aGetWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01"\xad\x02\n"InstantiateWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x65\n\nparameters\x18\x06 \x03(\x0b\x32L.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.ParametersEntryB\x03\xe0\x41\x01\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xc8\x01\n(InstantiateInlineWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x41\n\x08template\x18\x02 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x03 \x01(\tB\x03\xe0\x41\x01"b\n\x1dUpdateWorkflowTemplateRequest\x12\x41\n\x08template\x18\x01 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x02"\x91\x01\n\x1cListWorkflowTemplatesRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x81\x01\n\x1dListWorkflowTemplatesResponse\x12\x42\n\ttemplates\x18\x01 \x03(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"u\n\x1d\x44\x65leteWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 
\x01(\x05\x42\x03\xe0\x41\x01\x32\xe6\x10\n\x17WorkflowTemplateService\x12\x9b\x02\n\x16\x43reateWorkflowTemplate\x12\x37.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"\x9b\x01\x82\xd3\xe4\x93\x02\x82\x01"5/v1/{parent=projects/*/locations/*}/workflowTemplates:\x08templateZ?"3/v1/{parent=projects/*/regions/*}/workflowTemplates:\x08template\xda\x41\x0fparent,template\x12\xf4\x01\n\x13GetWorkflowTemplate\x12\x34.google.cloud.dataproc.v1.GetWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"{\x82\xd3\xe4\x93\x02n\x12\x35/v1/{name=projects/*/locations/*/workflowTemplates/*}Z5\x12\x33/v1/{name=projects/*/regions/*/workflowTemplates/*}\xda\x41\x04name\x12\xd5\x02\n\x1bInstantiateWorkflowTemplate\x12<.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xd8\x01\x82\xd3\xe4\x93\x02\x8c\x01"A/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\x01*ZD"?/v1/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\x01*\xda\x41\x04name\xda\x41\x0fname,parameters\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xf4\x02\n!InstantiateInlineWorkflowTemplate\x12\x42.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xeb\x01\x82\xd3\xe4\x93\x02\xa6\x01"G/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\x08templateZQ"E/v1/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\x08template\xda\x41\x0fparent,template\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xa6\x02\n\x16UpdateWorkflowTemplate\x12\x37.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"\xa6\x01\x82\xd3\xe4\x93\x02\x94\x01\x1a>/v1/{template.name=projects/*/locations/*/workflowTemplates/*}:\x08templateZH\x1a`__. Label **values** - may be empty, but, if present, must contain 1 to 63 - characters, and must conform to `RFC 1035 - `__. No more than 32 - labels can be associated with a template. - placement: - Required. WorkflowTemplate scheduling information. - jobs: - Required. The Directed Acyclic Graph of Jobs to submit. - parameters: - Optional. emplate parameters whose values are substituted into - the template. Values for parameters must be provided when the - template is instantiated. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.WorkflowTemplate) - ), -) -_sym_db.RegisterMessage(WorkflowTemplate) -_sym_db.RegisterMessage(WorkflowTemplate.LabelsEntry) - -WorkflowTemplatePlacement = _reflection.GeneratedProtocolMessageType( - "WorkflowTemplatePlacement", - (_message.Message,), - dict( - DESCRIPTOR=_WORKFLOWTEMPLATEPLACEMENT, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""Specifies workflow execution target. - - Either ``managed_cluster`` or ``cluster_selector`` is required. - - - Attributes: - placement: - Required. Specifies where workflow executes; either on a - managed cluster or an existing cluster chosen by labels. - managed_cluster: - A cluster that is managed by the workflow. - cluster_selector: - Optional. A selector that chooses target cluster for jobs - based on metadata. The selector is evaluated at the time each - job is submitted. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.WorkflowTemplatePlacement) - ), -) -_sym_db.RegisterMessage(WorkflowTemplatePlacement) - -ManagedCluster = _reflection.GeneratedProtocolMessageType( - "ManagedCluster", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_MANAGEDCLUSTER_LABELSENTRY, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ManagedCluster.LabelsEntry) - ), - ), - DESCRIPTOR=_MANAGEDCLUSTER, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""Cluster that is managed by the workflow. - - - Attributes: - cluster_name: - Required. The cluster name prefix. A unique cluster name will - be formed by appending a random suffix. The name must contain - only lower-case letters (a-z), numbers (0-9), and hyphens (-). - Must begin with a letter. Cannot begin or end with hyphen. - Must consist of between 2 and 35 characters. - config: - Required. The cluster configuration. - labels: - Optional. The labels to associate with this cluster. Label - keys must be between 1 and 63 characters long. Label values must be between - 1 and 63 characters long. No more than 32 - labels can be associated with a given cluster. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ManagedCluster) - ), -) -_sym_db.RegisterMessage(ManagedCluster) -_sym_db.RegisterMessage(ManagedCluster.LabelsEntry) - -ClusterSelector = _reflection.GeneratedProtocolMessageType( - "ClusterSelector", - (_message.Message,), - dict( - ClusterLabelsEntry=_reflection.GeneratedProtocolMessageType( - "ClusterLabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTERSELECTOR_CLUSTERLABELSENTRY, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterSelector.ClusterLabelsEntry) - ), - ), - DESCRIPTOR=_CLUSTERSELECTOR, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A selector that chooses target cluster for jobs based on - metadata. - - - Attributes: - zone: - Optional. The zone where workflow process executes. This - parameter does not affect the selection of the cluster. If - unspecified, the zone of the first cluster matching the - selector is used. - cluster_labels: - Required. The cluster labels. Cluster must have all labels to - match. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterSelector) - ), -) -_sym_db.RegisterMessage(ClusterSelector) -_sym_db.RegisterMessage(ClusterSelector.ClusterLabelsEntry) - -OrderedJob = _reflection.GeneratedProtocolMessageType( - "OrderedJob", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_ORDEREDJOB_LABELSENTRY, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.OrderedJob.LabelsEntry) - ), - ), - DESCRIPTOR=_ORDEREDJOB, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A job executed by the workflow. - - - Attributes: - step_id: - Required. The step id. The id must be unique among all jobs - within the template. 
The step id is used as prefix for job - id, as job ``goog-dataproc-workflow-step-id`` label, and in [p - rerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prereq - uisite\_step\_ids] field from other steps. The id must - contain only letters (a-z, A-Z), numbers (0-9), underscores - (\_), and hyphens (-). Cannot begin or end with underscore or - hyphen. Must consist of between 3 and 50 characters. - job_type: - Required. The job definition. - hadoop_job: - Job is a Hadoop job. - spark_job: - Job is a Spark job. - pyspark_job: - Job is a Pyspark job. - hive_job: - Job is a Hive job. - pig_job: - Job is a Pig job. - spark_sql_job: - Job is a SparkSql job. - labels: - Optional. The labels to associate with this job. Label keys - must be between 1 and 63 characters long. Label values must be between - 1 and 63 characters long. No more than 32 labels can be - associated with a given job. - scheduling: - Optional. Job scheduling configuration. - prerequisite_step_ids: - Optional. The optional list of prerequisite job step\_ids. If - not specified, the job will start at the beginning of - workflow. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.OrderedJob) - ), -) -_sym_db.RegisterMessage(OrderedJob) -_sym_db.RegisterMessage(OrderedJob.LabelsEntry) - -TemplateParameter = _reflection.GeneratedProtocolMessageType( - "TemplateParameter", - (_message.Message,), - dict( - DESCRIPTOR=_TEMPLATEPARAMETER, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A configurable parameter that replaces one or more fields - in the template. Parameterizable fields: - Labels - File uris - Job - properties - Job arguments - Script variables - Main class (in HadoopJob - and SparkJob) - Zone (in ClusterSelector) - - - Attributes: - name: - Required. Parameter name. The parameter name is used as the - key, and paired with the parameter value, which are passed to - the template when the template is instantiated. The name must - contain only capital letters (A-Z), numbers (0-9), and - underscores (\_), and must not start with a number. The - maximum length is 40 characters. - fields: - Required. Paths to all fields that the parameter replaces. A - field is allowed to appear in at most one parameter's list of - field paths. A field path is similar in syntax to a - [google.protobuf.FieldMask][google.protobuf.FieldMask]. For - example, a field path that references the zone field of a - workflow template's cluster selector would be specified as - ``placement.clusterSelector.zone``. 
Also, field paths can - reference fields using the following syntax: - Values in - maps can be referenced by key: - labels['key'] - - placement.clusterSelector.clusterLabels['key'] - - placement.managedCluster.labels['key'] - - placement.clusterSelector.clusterLabels['key'] - - jobs['step-id'].labels['key'] - Jobs in the jobs list can be - referenced by step-id: - jobs['step- - id'].hadoopJob.mainJarFileUri - jobs['step- - id'].hiveJob.queryFileUri - jobs['step- - id'].pySparkJob.mainPythonFileUri - jobs['step- - id'].hadoopJob.jarFileUris[0] - jobs['step- - id'].hadoopJob.archiveUris[0] - jobs['step- - id'].hadoopJob.fileUris[0] - jobs['step- - id'].pySparkJob.pythonFileUris[0] - Items in repeated fields - can be referenced by a zero-based index: - jobs['step- - id'].sparkJob.args[0] - Other examples: - jobs['step- - id'].hadoopJob.properties['key'] - jobs['step- - id'].hadoopJob.args[0] - jobs['step- - id'].hiveJob.scriptVariables['key'] - jobs['step- - id'].hadoopJob.mainJarFileUri - - placement.clusterSelector.zone It may not be possible to - parameterize maps and repeated fields in their entirety since - only individual map values and individual items in repeated - fields can be referenced. For example, the following field - paths are invalid: - placement.clusterSelector.clusterLabels - - jobs['step-id'].sparkJob.args - description: - Optional. Brief description of the parameter. Must not exceed - 1024 characters. - validation: - Optional. Validation rules to be applied to this parameter's - value. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.TemplateParameter) - ), -) -_sym_db.RegisterMessage(TemplateParameter) - -ParameterValidation = _reflection.GeneratedProtocolMessageType( - "ParameterValidation", - (_message.Message,), - dict( - DESCRIPTOR=_PARAMETERVALIDATION, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""Configuration for parameter validation. - - - Attributes: - validation_type: - Required. The type of validation to be performed. - regex: - Validation based on regular expressions. - values: - Validation based on a list of allowed values. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ParameterValidation) - ), -) -_sym_db.RegisterMessage(ParameterValidation) - -RegexValidation = _reflection.GeneratedProtocolMessageType( - "RegexValidation", - (_message.Message,), - dict( - DESCRIPTOR=_REGEXVALIDATION, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""Validation based on regular expressions. - - - Attributes: - regexes: - Required. RE2 regular expressions used to validate the - parameter's value. The value must match the regex in its - entirety (substring matches are not sufficient). - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.RegexValidation) - ), -) -_sym_db.RegisterMessage(RegexValidation) - -ValueValidation = _reflection.GeneratedProtocolMessageType( - "ValueValidation", - (_message.Message,), - dict( - DESCRIPTOR=_VALUEVALIDATION, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""Validation based on a list of allowed values. - - - Attributes: - values: - Required. List of allowed values for the parameter. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ValueValidation) - ), -) -_sym_db.RegisterMessage(ValueValidation) - -WorkflowMetadata = _reflection.GeneratedProtocolMessageType( - "WorkflowMetadata", - (_message.Message,), - dict( - ParametersEntry=_reflection.GeneratedProtocolMessageType( - "ParametersEntry", - (_message.Message,), - dict( - DESCRIPTOR=_WORKFLOWMETADATA_PARAMETERSENTRY, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.WorkflowMetadata.ParametersEntry) - ), - ), - DESCRIPTOR=_WORKFLOWMETADATA, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A Dataproc workflow template resource. - - - Attributes: - template: - Output only. The resource name of the workflow template as - described in - https://cloud.google.com/apis/design/resource\_names. - For - ``projects.regions.workflowTemplates``, the resource name of - the template has the following format: ``projects/{proje - ct_id}/regions/{region}/workflowTemplates/{template_id}`` - - For ``projects.locations.workflowTemplates``, the resource - name of the template has the following format: ``project - s/{project_id}/locations/{location}/workflowTemplates/{templat - e_id}`` - version: - Output only. The version of template at the time of workflow - instantiation. - create_cluster: - Output only. The create cluster operation metadata. - graph: - Output only. The workflow graph. - delete_cluster: - Output only. The delete cluster operation metadata. - state: - Output only. The workflow state. - cluster_name: - Output only. The name of the target cluster. - parameters: - Map from parameter names to values that were used for those - parameters. - start_time: - Output only. Workflow start time. - end_time: - Output only. Workflow end time. - cluster_uuid: - Output only. The UUID of target cluster. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.WorkflowMetadata) - ), -) -_sym_db.RegisterMessage(WorkflowMetadata) -_sym_db.RegisterMessage(WorkflowMetadata.ParametersEntry) - -ClusterOperation = _reflection.GeneratedProtocolMessageType( - "ClusterOperation", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTEROPERATION, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""The cluster operation triggered by a workflow. - - - Attributes: - operation_id: - Output only. The id of the cluster operation. - error: - Output only. Error, if operation failed. - done: - Output only. Indicates the operation is done. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterOperation) - ), -) -_sym_db.RegisterMessage(ClusterOperation) - -WorkflowGraph = _reflection.GeneratedProtocolMessageType( - "WorkflowGraph", - (_message.Message,), - dict( - DESCRIPTOR=_WORKFLOWGRAPH, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""The workflow graph. - - - Attributes: - nodes: - Output only. The workflow nodes. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.WorkflowGraph) - ), -) -_sym_db.RegisterMessage(WorkflowGraph) - -WorkflowNode = _reflection.GeneratedProtocolMessageType( - "WorkflowNode", - (_message.Message,), - dict( - DESCRIPTOR=_WORKFLOWNODE, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""The workflow node. - - - Attributes: - step_id: - Output only. The name of the node. - prerequisite_step_ids: - Output only. Node's prerequisite nodes. 
- job_id: - Output only. The job id; populated after the node enters - RUNNING state. - state: - Output only. The node state. - error: - Output only. The error detail. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.WorkflowNode) - ), -) -_sym_db.RegisterMessage(WorkflowNode) - -CreateWorkflowTemplateRequest = _reflection.GeneratedProtocolMessageType( - "CreateWorkflowTemplateRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATEWORKFLOWTEMPLATEREQUEST, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A request to create a workflow template. - - - Attributes: - parent: - Required. The resource name of the region or location, as - described in - https://cloud.google.com/apis/design/resource\_names. - For - ``projects.regions.workflowTemplates,create``, the resource - name of the region has the following format: - ``projects/{project_id}/regions/{region}`` - For - ``projects.locations.workflowTemplates.create``, the resource - name of the location has the following format: - ``projects/{project_id}/locations/{location}`` - template: - Required. The Dataproc workflow template to create. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.CreateWorkflowTemplateRequest) - ), -) -_sym_db.RegisterMessage(CreateWorkflowTemplateRequest) - -GetWorkflowTemplateRequest = _reflection.GeneratedProtocolMessageType( - "GetWorkflowTemplateRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETWORKFLOWTEMPLATEREQUEST, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A request to fetch a workflow template. - - - Attributes: - name: - Required. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource\_names. - For - ``projects.regions.workflowTemplates.get``, the resource name - of the template has the following format: ``projects/{pr - oject_id}/regions/{region}/workflowTemplates/{template_id}`` - - For ``projects.locations.workflowTemplates.get``, the - resource name of the template has the following format: - ``projects/{project_id}/locations/{location}/workflowTemplates - /{template_id}`` - version: - Optional. The version of workflow template to retrieve. Only - previously instantiated versions can be retrieved. If - unspecified, retrieves the current version. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.GetWorkflowTemplateRequest) - ), -) -_sym_db.RegisterMessage(GetWorkflowTemplateRequest) - -InstantiateWorkflowTemplateRequest = _reflection.GeneratedProtocolMessageType( - "InstantiateWorkflowTemplateRequest", - (_message.Message,), - dict( - ParametersEntry=_reflection.GeneratedProtocolMessageType( - "ParametersEntry", - (_message.Message,), - dict( - DESCRIPTOR=_INSTANTIATEWORKFLOWTEMPLATEREQUEST_PARAMETERSENTRY, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.ParametersEntry) - ), - ), - DESCRIPTOR=_INSTANTIATEWORKFLOWTEMPLATEREQUEST, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A request to instantiate a workflow template. - - - Attributes: - name: - Required. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource\_names. 
- For - ``projects.regions.workflowTemplates.instantiate``, the - resource name of the template has the following format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{te - mplate_id}`` - For - ``projects.locations.workflowTemplates.instantiate``, the - resource name of the template has the following format: ``p - rojects/{project_id}/locations/{location}/workflowTemplates/{t - emplate_id}`` - version: - Optional. The version of workflow template to instantiate. If - specified, the workflow will be instantiated only if the - current version of the workflow template has the supplied - version. This option cannot be used to instantiate a previous - version of workflow template. - request_id: - Optional. A tag that prevents multiple concurrent workflow - instances with the same tag from running. This mitigates risk - of concurrent instances started due to retries. It is - recommended to always set this value to a `UUID `__. The tag - must contain only letters (a-z, A-Z), numbers (0-9), - underscores (\_), and hyphens (-). The maximum length is 40 - characters. - parameters: - Optional. Map from parameter names to values that should be - used for those parameters. Values may not exceed 100 - characters. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest) - ), -) -_sym_db.RegisterMessage(InstantiateWorkflowTemplateRequest) -_sym_db.RegisterMessage(InstantiateWorkflowTemplateRequest.ParametersEntry) - -InstantiateInlineWorkflowTemplateRequest = _reflection.GeneratedProtocolMessageType( - "InstantiateInlineWorkflowTemplateRequest", - (_message.Message,), - dict( - DESCRIPTOR=_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A request to instantiate an inline workflow template. - - - Attributes: - parent: - Required. The resource name of the region or location, as - described in - https://cloud.google.com/apis/design/resource\_names. - For - ``projects.regions.workflowTemplates,instantiateinline``, the - resource name of the region has the following format: - ``projects/{project_id}/regions/{region}`` - For - ``projects.locations.workflowTemplates.instantiateinline``, - the resource name of the location has the following format: - ``projects/{project_id}/locations/{location}`` - template: - Required. The workflow template to instantiate. - request_id: - Optional. A tag that prevents multiple concurrent workflow - instances with the same tag from running. This mitigates risk - of concurrent instances started due to retries. It is - recommended to always set this value to a `UUID `__. The tag - must contain only letters (a-z, A-Z), numbers (0-9), - underscores (\_), and hyphens (-). The maximum length is 40 - characters. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest) - ), -) -_sym_db.RegisterMessage(InstantiateInlineWorkflowTemplateRequest) - -UpdateWorkflowTemplateRequest = _reflection.GeneratedProtocolMessageType( - "UpdateWorkflowTemplateRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATEWORKFLOWTEMPLATEREQUEST, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A request to update a workflow template. - - - Attributes: - template: - Required. The updated workflow template. The - ``template.version`` field must match the current version. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest) - ), -) -_sym_db.RegisterMessage(UpdateWorkflowTemplateRequest) - -ListWorkflowTemplatesRequest = _reflection.GeneratedProtocolMessageType( - "ListWorkflowTemplatesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTWORKFLOWTEMPLATESREQUEST, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A request to list workflow templates in a project. - - - Attributes: - parent: - Required. The resource name of the region or location, as - described in - https://cloud.google.com/apis/design/resource\_names. - For - ``projects.regions.workflowTemplates,list``, the resource name - of the region has the following format: - ``projects/{project_id}/regions/{region}`` - For - ``projects.locations.workflowTemplates.list``, the resource - name of the location has the following format: - ``projects/{project_id}/locations/{location}`` - page_size: - Optional. The maximum number of results to return in each - response. - page_token: - Optional. The page token, returned by a previous call, to - request the next page of results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ListWorkflowTemplatesRequest) - ), -) -_sym_db.RegisterMessage(ListWorkflowTemplatesRequest) - -ListWorkflowTemplatesResponse = _reflection.GeneratedProtocolMessageType( - "ListWorkflowTemplatesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTWORKFLOWTEMPLATESRESPONSE, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A response to a request to list workflow templates in a - project. - - - Attributes: - templates: - Output only. WorkflowTemplates list. - next_page_token: - Output only. This token is included in the response if there - are more results to fetch. To fetch additional results, - provide this value as the page\_token in a subsequent - ListWorkflowTemplatesRequest. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ListWorkflowTemplatesResponse) - ), -) -_sym_db.RegisterMessage(ListWorkflowTemplatesResponse) - -DeleteWorkflowTemplateRequest = _reflection.GeneratedProtocolMessageType( - "DeleteWorkflowTemplateRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEWORKFLOWTEMPLATEREQUEST, - __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A request to delete a workflow template. - - Currently started workflows will remain running. - - - Attributes: - name: - Required. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource\_names. - For - ``projects.regions.workflowTemplates.delete``, the resource - name of the template has the following format: ``project - s/{project_id}/regions/{region}/workflowTemplates/{template_id - }`` - For - ``projects.locations.workflowTemplates.instantiate``, the - resource name of the template has the following format: ``p - rojects/{project_id}/locations/{location}/workflowTemplates/{t - emplate_id}`` - version: - Optional. The version of workflow template to delete. If - specified, will only delete the template if the current server - version matches specified version. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest) - ), -) -_sym_db.RegisterMessage(DeleteWorkflowTemplateRequest) - - -DESCRIPTOR._options = None -_WORKFLOWTEMPLATE_LABELSENTRY._options = None -_WORKFLOWTEMPLATE.fields_by_name["id"]._options = None -_WORKFLOWTEMPLATE.fields_by_name["name"]._options = None -_WORKFLOWTEMPLATE.fields_by_name["version"]._options = None -_WORKFLOWTEMPLATE.fields_by_name["create_time"]._options = None -_WORKFLOWTEMPLATE.fields_by_name["update_time"]._options = None -_WORKFLOWTEMPLATE.fields_by_name["labels"]._options = None -_WORKFLOWTEMPLATE.fields_by_name["placement"]._options = None -_WORKFLOWTEMPLATE.fields_by_name["jobs"]._options = None -_WORKFLOWTEMPLATE.fields_by_name["parameters"]._options = None -_WORKFLOWTEMPLATE._options = None -_MANAGEDCLUSTER_LABELSENTRY._options = None -_MANAGEDCLUSTER.fields_by_name["cluster_name"]._options = None -_MANAGEDCLUSTER.fields_by_name["config"]._options = None -_MANAGEDCLUSTER.fields_by_name["labels"]._options = None -_CLUSTERSELECTOR_CLUSTERLABELSENTRY._options = None -_CLUSTERSELECTOR.fields_by_name["zone"]._options = None -_CLUSTERSELECTOR.fields_by_name["cluster_labels"]._options = None -_ORDEREDJOB_LABELSENTRY._options = None -_ORDEREDJOB.fields_by_name["step_id"]._options = None -_ORDEREDJOB.fields_by_name["labels"]._options = None -_ORDEREDJOB.fields_by_name["scheduling"]._options = None -_ORDEREDJOB.fields_by_name["prerequisite_step_ids"]._options = None -_TEMPLATEPARAMETER.fields_by_name["name"]._options = None -_TEMPLATEPARAMETER.fields_by_name["fields"]._options = None -_TEMPLATEPARAMETER.fields_by_name["description"]._options = None -_TEMPLATEPARAMETER.fields_by_name["validation"]._options = None -_REGEXVALIDATION.fields_by_name["regexes"]._options = None -_VALUEVALIDATION.fields_by_name["values"]._options = None -_WORKFLOWMETADATA_PARAMETERSENTRY._options = None -_WORKFLOWMETADATA.fields_by_name["template"]._options = None -_WORKFLOWMETADATA.fields_by_name["version"]._options = None -_WORKFLOWMETADATA.fields_by_name["create_cluster"]._options = None -_WORKFLOWMETADATA.fields_by_name["graph"]._options = None -_WORKFLOWMETADATA.fields_by_name["delete_cluster"]._options = None -_WORKFLOWMETADATA.fields_by_name["state"]._options = None -_WORKFLOWMETADATA.fields_by_name["cluster_name"]._options = None -_WORKFLOWMETADATA.fields_by_name["start_time"]._options = None -_WORKFLOWMETADATA.fields_by_name["end_time"]._options = None -_WORKFLOWMETADATA.fields_by_name["cluster_uuid"]._options = None -_CLUSTEROPERATION.fields_by_name["operation_id"]._options = None -_CLUSTEROPERATION.fields_by_name["error"]._options = None -_CLUSTEROPERATION.fields_by_name["done"]._options = None -_WORKFLOWGRAPH.fields_by_name["nodes"]._options = None -_WORKFLOWNODE.fields_by_name["step_id"]._options = None -_WORKFLOWNODE.fields_by_name["prerequisite_step_ids"]._options = None -_WORKFLOWNODE.fields_by_name["job_id"]._options = None -_WORKFLOWNODE.fields_by_name["state"]._options = None -_WORKFLOWNODE.fields_by_name["error"]._options = None -_CREATEWORKFLOWTEMPLATEREQUEST.fields_by_name["parent"]._options = None -_CREATEWORKFLOWTEMPLATEREQUEST.fields_by_name["template"]._options = None -_GETWORKFLOWTEMPLATEREQUEST.fields_by_name["name"]._options = None -_GETWORKFLOWTEMPLATEREQUEST.fields_by_name["version"]._options = None -_INSTANTIATEWORKFLOWTEMPLATEREQUEST_PARAMETERSENTRY._options = None -_INSTANTIATEWORKFLOWTEMPLATEREQUEST.fields_by_name["name"]._options = None 
-_INSTANTIATEWORKFLOWTEMPLATEREQUEST.fields_by_name["version"]._options = None -_INSTANTIATEWORKFLOWTEMPLATEREQUEST.fields_by_name["request_id"]._options = None -_INSTANTIATEWORKFLOWTEMPLATEREQUEST.fields_by_name["parameters"]._options = None -_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST.fields_by_name["parent"]._options = None -_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST.fields_by_name["template"]._options = None -_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST.fields_by_name["request_id"]._options = None -_UPDATEWORKFLOWTEMPLATEREQUEST.fields_by_name["template"]._options = None -_LISTWORKFLOWTEMPLATESREQUEST.fields_by_name["parent"]._options = None -_LISTWORKFLOWTEMPLATESREQUEST.fields_by_name["page_size"]._options = None -_LISTWORKFLOWTEMPLATESREQUEST.fields_by_name["page_token"]._options = None -_LISTWORKFLOWTEMPLATESRESPONSE.fields_by_name["templates"]._options = None -_LISTWORKFLOWTEMPLATESRESPONSE.fields_by_name["next_page_token"]._options = None -_DELETEWORKFLOWTEMPLATEREQUEST.fields_by_name["name"]._options = None -_DELETEWORKFLOWTEMPLATEREQUEST.fields_by_name["version"]._options = None - -_WORKFLOWTEMPLATESERVICE = _descriptor.ServiceDescriptor( - name="WorkflowTemplateService", - full_name="google.cloud.dataproc.v1.WorkflowTemplateService", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" - ), - serialized_start=5166, - serialized_end=7316, - methods=[ - _descriptor.MethodDescriptor( - name="CreateWorkflowTemplate", - full_name="google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate", - index=0, - containing_service=None, - input_type=_CREATEWORKFLOWTEMPLATEREQUEST, - output_type=_WORKFLOWTEMPLATE, - serialized_options=_b( - '\202\323\344\223\002\202\001"5/v1/{parent=projects/*/locations/*}/workflowTemplates:\010templateZ?"3/v1/{parent=projects/*/regions/*}/workflowTemplates:\010template\332A\017parent,template' - ), - ), - _descriptor.MethodDescriptor( - name="GetWorkflowTemplate", - full_name="google.cloud.dataproc.v1.WorkflowTemplateService.GetWorkflowTemplate", - index=1, - containing_service=None, - input_type=_GETWORKFLOWTEMPLATEREQUEST, - output_type=_WORKFLOWTEMPLATE, - serialized_options=_b( - "\202\323\344\223\002n\0225/v1/{name=projects/*/locations/*/workflowTemplates/*}Z5\0223/v1/{name=projects/*/regions/*/workflowTemplates/*}\332A\004name" - ), - ), - _descriptor.MethodDescriptor( - name="InstantiateWorkflowTemplate", - full_name="google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate", - index=2, - containing_service=None, - input_type=_INSTANTIATEWORKFLOWTEMPLATEREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - '\202\323\344\223\002\214\001"A/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\001*ZD"?/v1/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\001*\332A\004name\332A\017name,parameters\312A)\n\025google.protobuf.Empty\022\020WorkflowMetadata' - ), - ), - _descriptor.MethodDescriptor( - name="InstantiateInlineWorkflowTemplate", - full_name="google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateInlineWorkflowTemplate", - index=3, - containing_service=None, - input_type=_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - 
'\202\323\344\223\002\246\001"G/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\010templateZQ"E/v1/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\010template\332A\017parent,template\312A)\n\025google.protobuf.Empty\022\020WorkflowMetadata' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateWorkflowTemplate", - full_name="google.cloud.dataproc.v1.WorkflowTemplateService.UpdateWorkflowTemplate", - index=4, - containing_service=None, - input_type=_UPDATEWORKFLOWTEMPLATEREQUEST, - output_type=_WORKFLOWTEMPLATE, - serialized_options=_b( - "\202\323\344\223\002\224\001\032>/v1/{template.name=projects/*/locations/*/workflowTemplates/*}:\010templateZH\032>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - >>> - >>> parent = client.region_path('[PROJECT]', '[REGION]') - >>> - >>> # TODO: Initialize `policy`: - >>> policy = {} - >>> - >>> response = client.create_autoscaling_policy(parent, policy) - - Args: - parent (str): Required. The "resource name" of the region or location, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.autoscalingPolicies.create``, the resource - name has the following format: - ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.autoscalingPolicies.create``, the resource - name has the following format: - ``projects/{project_id}/locations/{location}`` - policy (Union[dict, ~google.cloud.dataproc_v1beta2.types.AutoscalingPolicy]): Required. The autoscaling policy to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1beta2.types.AutoscalingPolicy` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types.AutoscalingPolicy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
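# Note (illustrative, not part of the generated source): the block below is the
# standard GAPIC lazy-wrapping pattern used throughout the removed clients. On
# the first call, the raw gRPC transport method is wrapped with the default
# retry and timeout settings keyed by "CreateAutoscalingPolicy" in the client
# config, and the wrapped callable is cached in self._inner_api_calls so later
# calls reuse it, e.g.:
#
#     self._inner_api_calls["create_autoscaling_policy"](
#         request, retry=retry, timeout=timeout, metadata=metadata
#     )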
- if "create_autoscaling_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "create_autoscaling_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_autoscaling_policy, - default_retry=self._method_configs["CreateAutoscalingPolicy"].retry, - default_timeout=self._method_configs["CreateAutoscalingPolicy"].timeout, - client_info=self._client_info, - ) - - request = autoscaling_policies_pb2.CreateAutoscalingPolicyRequest( - parent=parent, policy=policy - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_autoscaling_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_autoscaling_policy( - self, - policy, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates (replaces) autoscaling policy. - - Disabled check for update\_mask, because all updates will be full - replacements. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - >>> - >>> # TODO: Initialize `policy`: - >>> policy = {} - >>> - >>> response = client.update_autoscaling_policy(policy) - - Args: - policy (Union[dict, ~google.cloud.dataproc_v1beta2.types.AutoscalingPolicy]): Required. The updated autoscaling policy. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1beta2.types.AutoscalingPolicy` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types.AutoscalingPolicy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_autoscaling_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "update_autoscaling_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_autoscaling_policy, - default_retry=self._method_configs["UpdateAutoscalingPolicy"].retry, - default_timeout=self._method_configs["UpdateAutoscalingPolicy"].timeout, - client_info=self._client_info, - ) - - request = autoscaling_policies_pb2.UpdateAutoscalingPolicyRequest(policy=policy) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("policy.name", policy.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_autoscaling_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_autoscaling_policy( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Retrieves autoscaling policy. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - >>> - >>> name = client.autoscaling_policy_path('[PROJECT]', '[REGION]', '[AUTOSCALING_POLICY]') - >>> - >>> response = client.get_autoscaling_policy(name) - - Args: - name (str): Required. The "resource name" of the autoscaling policy, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.autoscalingPolicies.get``, the resource name - of the policy has the following format: - ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` - - - For ``projects.locations.autoscalingPolicies.get``, the resource name - of the policy has the following format: - ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types.AutoscalingPolicy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_autoscaling_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "get_autoscaling_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_autoscaling_policy, - default_retry=self._method_configs["GetAutoscalingPolicy"].retry, - default_timeout=self._method_configs["GetAutoscalingPolicy"].timeout, - client_info=self._client_info, - ) - - request = autoscaling_policies_pb2.GetAutoscalingPolicyRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_autoscaling_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_autoscaling_policies( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists autoscaling policies in the project. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - >>> - >>> parent = client.region_path('[PROJECT]', '[REGION]') - >>> - >>> # Iterate over all results - >>> for element in client.list_autoscaling_policies(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_autoscaling_policies(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The "resource name" of the region or location, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.autoscalingPolicies.list``, the resource name - of the region has the following format: - ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.autoscalingPolicies.list``, the resource - name of the location has the following format: - ``projects/{project_id}/locations/{location}`` - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.dataproc_v1beta2.types.AutoscalingPolicy` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_autoscaling_policies" not in self._inner_api_calls: - self._inner_api_calls[ - "list_autoscaling_policies" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_autoscaling_policies, - default_retry=self._method_configs["ListAutoscalingPolicies"].retry, - default_timeout=self._method_configs["ListAutoscalingPolicies"].timeout, - client_info=self._client_info, - ) - - request = autoscaling_policies_pb2.ListAutoscalingPoliciesRequest( - parent=parent, page_size=page_size - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_autoscaling_policies"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="policies", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_autoscaling_policy( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes an autoscaling policy. It is an error to delete an autoscaling - policy that is in use by one or more clusters. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - >>> - >>> name = client.autoscaling_policy_path('[PROJECT]', '[REGION]', '[AUTOSCALING_POLICY]') - >>> - >>> client.delete_autoscaling_policy(name) - - Args: - name (str): Required. The "resource name" of the autoscaling policy, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.autoscalingPolicies.delete``, the resource - name of the policy has the following format: - ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` - - - For ``projects.locations.autoscalingPolicies.delete``, the resource - name of the policy has the following format: - ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_autoscaling_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_autoscaling_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_autoscaling_policy, - default_retry=self._method_configs["DeleteAutoscalingPolicy"].retry, - default_timeout=self._method_configs["DeleteAutoscalingPolicy"].timeout, - client_info=self._client_info, - ) - - request = autoscaling_policies_pb2.DeleteAutoscalingPolicyRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_autoscaling_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client_config.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client_config.py deleted file mode 100644 index f41732281d42..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client_config.py +++ /dev/null @@ -1,48 +0,0 @@ -config = { - "interfaces": { - "google.cloud.dataproc.v1beta2.AutoscalingPolicyService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "CreateAutoscalingPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateAutoscalingPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetAutoscalingPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListAutoscalingPolicies": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteAutoscalingPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py deleted file mode 100644 index e64fc0d7ce35..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py +++ /dev/null @@ -1,881 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.cloud.dataproc.v1beta2 ClusterController API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.grpc_helpers -import google.api_core.operation -import google.api_core.operations_v1 -import google.api_core.page_iterator -import grpc - -from google.cloud.dataproc_v1beta2.gapic import cluster_controller_client_config -from google.cloud.dataproc_v1beta2.gapic import enums -from google.cloud.dataproc_v1beta2.gapic.transports import ( - cluster_controller_grpc_transport, -) -from google.cloud.dataproc_v1beta2.proto import autoscaling_policies_pb2 -from google.cloud.dataproc_v1beta2.proto import autoscaling_policies_pb2_grpc -from google.cloud.dataproc_v1beta2.proto import clusters_pb2 -from google.cloud.dataproc_v1beta2.proto import clusters_pb2_grpc -from google.cloud.dataproc_v1beta2.proto import operations_pb2 as proto_operations_pb2 -from google.longrunning import operations_pb2 as longrunning_operations_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version - - -class ClusterControllerClient(object): - """ - The ClusterControllerService provides methods to manage clusters - of Compute Engine instances. - """ - - SERVICE_ADDRESS = "dataproc.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.dataproc.v1beta2.ClusterController" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ClusterControllerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.ClusterControllerGrpcTransport, - Callable[[~.Credentials, type], ~.ClusterControllerGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = cluster_controller_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=cluster_controller_grpc_transport.ClusterControllerGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = cluster_controller_grpc_transport.ClusterControllerGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_cluster( - self, - project_id, - region, - cluster, - request_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a cluster in a project. The returned ``Operation.metadata`` will - be - `ClusterOperationMetadata `__. 
- - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.ClusterControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `cluster`: - >>> cluster = {} - >>> - >>> response = client.create_cluster(project_id, region, cluster) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. - cluster (Union[dict, ~google.cloud.dataproc_v1beta2.types.Cluster]): Required. The cluster to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1beta2.types.Cluster` - request_id (str): Optional. A unique id used to identify the request. If the server - receives two ``CreateClusterRequest`` requests with the same id, then - the second request will be ignored and the first - ``google.longrunning.Operation`` created and stored in the backend is - returned. - - It is recommended to always set this value to a - `UUID `__. - - The id must contain only letters (a-z, A-Z), numbers (0-9), underscores - (\_), and hyphens (-). The maximum length is 40 characters. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_cluster" not in self._inner_api_calls: - self._inner_api_calls[ - "create_cluster" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_cluster, - default_retry=self._method_configs["CreateCluster"].retry, - default_timeout=self._method_configs["CreateCluster"].timeout, - client_info=self._client_info, - ) - - request = clusters_pb2.CreateClusterRequest( - project_id=project_id, region=region, cluster=cluster, request_id=request_id - ) - operation = self._inner_api_calls["create_cluster"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - clusters_pb2.Cluster, - metadata_type=proto_operations_pb2.ClusterOperationMetadata, - ) - - def update_cluster( - self, - project_id, - region, - cluster_name, - cluster, - update_mask, - graceful_decommission_timeout=None, - request_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a cluster in a project. 
The returned ``Operation.metadata`` will - be - `ClusterOperationMetadata `__. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.ClusterControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `cluster_name`: - >>> cluster_name = '' - >>> - >>> # TODO: Initialize `cluster`: - >>> cluster = {} - >>> - >>> # TODO: Initialize `update_mask`: - >>> update_mask = {} - >>> - >>> response = client.update_cluster(project_id, region, cluster_name, cluster, update_mask) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project the - cluster belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. - cluster_name (str): Required. The cluster name. - cluster (Union[dict, ~google.cloud.dataproc_v1beta2.types.Cluster]): Required. The changes to the cluster. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1beta2.types.Cluster` - update_mask (Union[dict, ~google.cloud.dataproc_v1beta2.types.FieldMask]): Required. Specifies the path, relative to ``Cluster``, of the field to - update. For example, to change the number of workers in a cluster to 5, - the ``update_mask`` parameter would be specified as - ``config.worker_config.num_instances``, and the ``PATCH`` request body - would specify the new value, as follows: - - :: - - { - "config":{ - "workerConfig":{ - "numInstances":"5" - } - } - } - - Similarly, to change the number of preemptible workers in a cluster to - 5, the ``update_mask`` parameter would be - ``config.secondary_worker_config.num_instances``, and the ``PATCH`` - request body would be set as follows: - - :: - - { - "config":{ - "secondaryWorkerConfig":{ - "numInstances":"5" - } - } - } - - Note: currently only the following fields can be updated: - - .. raw:: html - - - - - - - - - - - - - - - - - - - - - - - - - - -
-              Mask                                             Purpose
-              ===============================================  ================================================
-              labels                                           Updates labels
-              config.worker_config.num_instances               Resize primary worker group
-              config.secondary_worker_config.num_instances     Resize secondary worker group
-              config.lifecycle_config.auto_delete_ttl          Reset MAX TTL duration
-              config.lifecycle_config.auto_delete_time         Update MAX TTL deletion timestamp
-              config.lifecycle_config.idle_delete_ttl          Update Idle TTL duration
-              config.autoscaling_config.policy_uri             Use, stop using, or change autoscaling policies
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1beta2.types.FieldMask` - graceful_decommission_timeout (Union[dict, ~google.cloud.dataproc_v1beta2.types.Duration]): Optional. Timeout for graceful YARN decomissioning. Graceful - decommissioning allows removing nodes from the cluster without - interrupting jobs in progress. Timeout specifies how long to wait for jobs - in progress to finish before forcefully removing nodes (and potentially - interrupting jobs). Default timeout is 0 (for forceful decommission), and - the maximum allowed timeout is 1 day. - - Only supported on Dataproc image versions 1.2 and higher. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1beta2.types.Duration` - request_id (str): Optional. A unique id used to identify the request. If the server - receives two ``UpdateClusterRequest`` requests with the same id, then - the second request will be ignored and the first - ``google.longrunning.Operation`` created and stored in the backend is - returned. - - It is recommended to always set this value to a - `UUID `__. - - The id must contain only letters (a-z, A-Z), numbers (0-9), underscores - (\_), and hyphens (-). The maximum length is 40 characters. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_cluster" not in self._inner_api_calls: - self._inner_api_calls[ - "update_cluster" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_cluster, - default_retry=self._method_configs["UpdateCluster"].retry, - default_timeout=self._method_configs["UpdateCluster"].timeout, - client_info=self._client_info, - ) - - request = clusters_pb2.UpdateClusterRequest( - project_id=project_id, - region=region, - cluster_name=cluster_name, - cluster=cluster, - update_mask=update_mask, - graceful_decommission_timeout=graceful_decommission_timeout, - request_id=request_id, - ) - operation = self._inner_api_calls["update_cluster"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - clusters_pb2.Cluster, - metadata_type=proto_operations_pb2.ClusterOperationMetadata, - ) - - def delete_cluster( - self, - project_id, - region, - cluster_name, - cluster_uuid=None, - request_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a cluster in a project. The returned ``Operation.metadata`` will - be - `ClusterOperationMetadata `__. 
- - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.ClusterControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `cluster_name`: - >>> cluster_name = '' - >>> - >>> response = client.delete_cluster(project_id, region, cluster_name) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. - cluster_name (str): Required. The cluster name. - cluster_uuid (str): Optional. Specifying the ``cluster_uuid`` means the RPC should fail - (with error NOT\_FOUND) if cluster with specified UUID does not exist. - request_id (str): Optional. A unique id used to identify the request. If the server - receives two ``DeleteClusterRequest`` requests with the same id, then - the second request will be ignored and the first - ``google.longrunning.Operation`` created and stored in the backend is - returned. - - It is recommended to always set this value to a - `UUID `__. - - The id must contain only letters (a-z, A-Z), numbers (0-9), underscores - (\_), and hyphens (-). The maximum length is 40 characters. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_cluster" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_cluster" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_cluster, - default_retry=self._method_configs["DeleteCluster"].retry, - default_timeout=self._method_configs["DeleteCluster"].timeout, - client_info=self._client_info, - ) - - request = clusters_pb2.DeleteClusterRequest( - project_id=project_id, - region=region, - cluster_name=cluster_name, - cluster_uuid=cluster_uuid, - request_id=request_id, - ) - operation = self._inner_api_calls["delete_cluster"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=proto_operations_pb2.ClusterOperationMetadata, - ) - - def get_cluster( - self, - project_id, - region, - cluster_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the resource representation for a cluster in a project. 
- - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.ClusterControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `cluster_name`: - >>> cluster_name = '' - >>> - >>> response = client.get_cluster(project_id, region, cluster_name) - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. - cluster_name (str): Required. The cluster name. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types.Cluster` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_cluster" not in self._inner_api_calls: - self._inner_api_calls[ - "get_cluster" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_cluster, - default_retry=self._method_configs["GetCluster"].retry, - default_timeout=self._method_configs["GetCluster"].timeout, - client_info=self._client_info, - ) - - request = clusters_pb2.GetClusterRequest( - project_id=project_id, region=region, cluster_name=cluster_name - ) - return self._inner_api_calls["get_cluster"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_clusters( - self, - project_id, - region, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists all regions/{region}/clusters in a project. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.ClusterControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # Iterate over all results - >>> for element in client.list_clusters(project_id, region): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_clusters(project_id, region).pages: - ... for element in page: - ... # process element - ... pass - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. - filter_ (str): Optional. A filter constraining the clusters to list. Filters are - case-sensitive and have the following syntax: - - field = value [AND [field = value]] ... - - where **field** is one of ``status.state``, ``clusterName``, or - ``labels.[KEY]``, and ``[KEY]`` is a label key. **value** can be ``*`` - to match all values. 
``status.state`` can be one of the following: - ``ACTIVE``, ``INACTIVE``, ``CREATING``, ``RUNNING``, ``ERROR``, - ``DELETING``, or ``UPDATING``. ``ACTIVE`` contains the ``CREATING``, - ``UPDATING``, and ``RUNNING`` states. ``INACTIVE`` contains the - ``DELETING`` and ``ERROR`` states. ``clusterName`` is the name of the - cluster provided at creation time. Only the logical ``AND`` operator is - supported; space-separated items are treated as having an implicit - ``AND`` operator. - - Example filter: - - status.state = ACTIVE AND clusterName = mycluster AND labels.env = - staging AND labels.starred = \* - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.dataproc_v1beta2.types.Cluster` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_clusters" not in self._inner_api_calls: - self._inner_api_calls[ - "list_clusters" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_clusters, - default_retry=self._method_configs["ListClusters"].retry, - default_timeout=self._method_configs["ListClusters"].timeout, - client_info=self._client_info, - ) - - request = clusters_pb2.ListClustersRequest( - project_id=project_id, region=region, filter=filter_, page_size=page_size - ) - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_clusters"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="clusters", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def diagnose_cluster( - self, - project_id, - region, - cluster_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets cluster diagnostic information. The returned ``Operation.metadata`` - will be - `ClusterOperationMetadata `__. - After the operation completes, ``Operation.response`` contains - `Empty `__. 
- - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.ClusterControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `cluster_name`: - >>> cluster_name = '' - >>> - >>> response = client.diagnose_cluster(project_id, region, cluster_name) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. - cluster_name (str): Required. The cluster name. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "diagnose_cluster" not in self._inner_api_calls: - self._inner_api_calls[ - "diagnose_cluster" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.diagnose_cluster, - default_retry=self._method_configs["DiagnoseCluster"].retry, - default_timeout=self._method_configs["DiagnoseCluster"].timeout, - client_info=self._client_info, - ) - - request = clusters_pb2.DiagnoseClusterRequest( - project_id=project_id, region=region, cluster_name=cluster_name - ) - operation = self._inner_api_calls["diagnose_cluster"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=clusters_pb2.DiagnoseClusterResults, - ) diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client_config.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client_config.py deleted file mode 100644 index e551c9299161..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client_config.py +++ /dev/null @@ -1,53 +0,0 @@ -config = { - "interfaces": { - "google.cloud.dataproc.v1beta2.ClusterController": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": ["UNAVAILABLE"], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 10000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 10000, - "total_timeout_millis": 300000, - } - }, - "methods": { - "CreateCluster": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateCluster": { - 
"timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DeleteCluster": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetCluster": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListClusters": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DiagnoseCluster": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/enums.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/enums.py deleted file mode 100644 index 8c3b09801f81..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/enums.py +++ /dev/null @@ -1,304 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class Component(enum.IntEnum): - """ - Cluster components that can be activated. - - Attributes: - COMPONENT_UNSPECIFIED (int): Unspecified component. - ANACONDA (int): The Anaconda python distribution. - DRUID (int): The Druid query engine. - HIVE_WEBHCAT (int): The Hive Web HCatalog (the REST service for accessing HCatalog). - JUPYTER (int): The Jupyter Notebook. - KERBEROS (int): The Kerberos security feature. - PRESTO (int): The Presto query engine. - ZEPPELIN (int): The Zeppelin notebook. - ZOOKEEPER (int): The Zookeeper service. - """ - - COMPONENT_UNSPECIFIED = 0 - ANACONDA = 5 - DRUID = 9 - HIVE_WEBHCAT = 3 - JUPYTER = 1 - KERBEROS = 7 - PRESTO = 6 - ZEPPELIN = 4 - ZOOKEEPER = 8 - - -class ClusterOperationStatus(object): - class State(enum.IntEnum): - """ - The operation state. - - Attributes: - UNKNOWN (int): Unused. - PENDING (int): The operation has been created. - RUNNING (int): The operation is running. - DONE (int): The operation is done; either cancelled or completed. - """ - - UNKNOWN = 0 - PENDING = 1 - RUNNING = 2 - DONE = 3 - - -class ClusterStatus(object): - class State(enum.IntEnum): - """ - The cluster state. - - Attributes: - UNKNOWN (int): The cluster state is unknown. - CREATING (int): The cluster is being created and set up. It is not ready for use. - RUNNING (int): The cluster is currently running and healthy. It is ready for use. - ERROR (int): The cluster encountered an error. It is not ready for use. - DELETING (int): The cluster is being deleted. It cannot be used. - UPDATING (int): The cluster is being updated. It continues to accept and process jobs. - """ - - UNKNOWN = 0 - CREATING = 1 - RUNNING = 2 - ERROR = 3 - DELETING = 4 - UPDATING = 5 - - class Substate(enum.IntEnum): - """ - The cluster substate. - - Attributes: - UNSPECIFIED (int): The cluster substate is unknown. 
- UNHEALTHY (int): The cluster is known to be in an unhealthy state - (for example, critical daemons are not running or HDFS capacity is - exhausted). - - Applies to RUNNING state. - STALE_STATUS (int): The agent-reported status is out of date (may occur if - Cloud Dataproc loses communication with Agent). - - Applies to RUNNING state. - """ - - UNSPECIFIED = 0 - UNHEALTHY = 1 - STALE_STATUS = 2 - - -class JobStatus(object): - class State(enum.IntEnum): - """ - The job state. - - Attributes: - STATE_UNSPECIFIED (int): The job state is unknown. - PENDING (int): The job is pending; it has been submitted, but is not yet running. - SETUP_DONE (int): Job has been received by the service and completed initial setup; - it will soon be submitted to the cluster. - RUNNING (int): The job is running on the cluster. - CANCEL_PENDING (int): A CancelJob request has been received, but is pending. - CANCEL_STARTED (int): Transient in-flight resources have been canceled, and the request to - cancel the running job has been issued to the cluster. - CANCELLED (int): The job cancellation was successful. - DONE (int): The job has completed successfully. - ERROR (int): The job has completed, but encountered an error. - ATTEMPT_FAILURE (int): Job attempt has failed. The detail field contains failure details for - this attempt. - - Applies to restartable jobs only. - """ - - STATE_UNSPECIFIED = 0 - PENDING = 1 - SETUP_DONE = 8 - RUNNING = 2 - CANCEL_PENDING = 3 - CANCEL_STARTED = 7 - CANCELLED = 4 - DONE = 5 - ERROR = 6 - ATTEMPT_FAILURE = 9 - - class Substate(enum.IntEnum): - """ - The job substate. - - Attributes: - UNSPECIFIED (int): The job substate is unknown. - SUBMITTED (int): The Job is submitted to the agent. - - Applies to RUNNING state. - QUEUED (int): The Job has been received and is awaiting execution (it may be waiting - for a condition to be met). See the "details" field for the reason for - the delay. - - Applies to RUNNING state. - STALE_STATUS (int): The agent-reported status is out of date, which may be caused by a - loss of communication between the agent and Cloud Dataproc. If the - agent does not send a timely update, the job will fail. - - Applies to RUNNING state. - """ - - UNSPECIFIED = 0 - SUBMITTED = 1 - QUEUED = 2 - STALE_STATUS = 3 - - -class ListJobsRequest(object): - class JobStateMatcher(enum.IntEnum): - """ - A matcher that specifies categories of job states. - - Attributes: - ALL (int): Match all jobs, regardless of state. - ACTIVE (int): Only match jobs in non-terminal states: PENDING, RUNNING, or - CANCEL\_PENDING. - NON_ACTIVE (int): Only match jobs in terminal states: CANCELLED, DONE, or ERROR. - """ - - ALL = 0 - ACTIVE = 1 - NON_ACTIVE = 2 - - -class LoggingConfig(object): - class Level(enum.IntEnum): - """ - The Log4j level for job execution. When running an `Apache - Hive `__ job, Cloud Dataproc configures the - Hive client to an equivalent verbosity level. - - Attributes: - LEVEL_UNSPECIFIED (int): Level is unspecified. Use default level for log4j. - ALL (int): Use ALL level for log4j. - TRACE (int): Use TRACE level for log4j. - DEBUG (int): Use DEBUG level for log4j. - INFO (int): Use INFO level for log4j. - WARN (int): Use WARN level for log4j. - ERROR (int): Use ERROR level for log4j. - FATAL (int): Use FATAL level for log4j. - OFF (int): Turn off log4j. 
- """ - - LEVEL_UNSPECIFIED = 0 - ALL = 1 - TRACE = 2 - DEBUG = 3 - INFO = 4 - WARN = 5 - ERROR = 6 - FATAL = 7 - OFF = 8 - - -class ReservationAffinity(object): - class Type(enum.IntEnum): - """ - Indicates whether to consume capacity from an reservation or not. - - Attributes: - TYPE_UNSPECIFIED (int) - NO_RESERVATION (int): Do not consume from any allocated capacity. - ANY_RESERVATION (int): Consume any reservation available. - SPECIFIC_RESERVATION (int): Must consume from a specific reservation. Must specify key value fields - for specifying the reservations. - """ - - TYPE_UNSPECIFIED = 0 - NO_RESERVATION = 1 - ANY_RESERVATION = 2 - SPECIFIC_RESERVATION = 3 - - -class WorkflowMetadata(object): - class State(enum.IntEnum): - """ - The operation state. - - Attributes: - UNKNOWN (int): Unused. - PENDING (int): The operation has been created. - RUNNING (int): The operation is running. - DONE (int): The operation is done; either cancelled or completed. - """ - - UNKNOWN = 0 - PENDING = 1 - RUNNING = 2 - DONE = 3 - - -class WorkflowNode(object): - class NodeState(enum.IntEnum): - """ - The workflow node state. - - Attributes: - NODE_STATUS_UNSPECIFIED (int): State is unspecified. - BLOCKED (int): The node is awaiting prerequisite node to finish. - RUNNABLE (int): The node is runnable but not running. - RUNNING (int): The node is running. - COMPLETED (int): The node completed successfully. - FAILED (int): The node failed. A node can be marked FAILED because - its ancestor or peer failed. - """ - - NODE_STATUS_UNSPECIFIED = 0 - BLOCKED = 1 - RUNNABLE = 2 - RUNNING = 3 - COMPLETED = 4 - FAILED = 5 - - -class YarnApplication(object): - class State(enum.IntEnum): - """ - The application state, corresponding to - YarnProtos.YarnApplicationStateProto. - - Attributes: - STATE_UNSPECIFIED (int): Status is unspecified. - NEW (int): Status is NEW. - NEW_SAVING (int): Status is NEW\_SAVING. - SUBMITTED (int): Status is SUBMITTED. - ACCEPTED (int): Status is ACCEPTED. - RUNNING (int): Status is RUNNING. - FINISHED (int): Status is FINISHED. - FAILED (int): Status is FAILED. - KILLED (int): Status is KILLED. - """ - - STATE_UNSPECIFIED = 0 - NEW = 1 - NEW_SAVING = 2 - SUBMITTED = 3 - ACCEPTED = 4 - RUNNING = 5 - FINISHED = 6 - FAILED = 7 - KILLED = 8 diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py deleted file mode 100644 index 21b6ca4941e0..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py +++ /dev/null @@ -1,707 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.cloud.dataproc.v1beta2 JobController API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import grpc - -from google.cloud.dataproc_v1beta2.gapic import enums -from google.cloud.dataproc_v1beta2.gapic import job_controller_client_config -from google.cloud.dataproc_v1beta2.gapic.transports import job_controller_grpc_transport -from google.cloud.dataproc_v1beta2.proto import autoscaling_policies_pb2 -from google.cloud.dataproc_v1beta2.proto import autoscaling_policies_pb2_grpc -from google.cloud.dataproc_v1beta2.proto import clusters_pb2 -from google.cloud.dataproc_v1beta2.proto import clusters_pb2_grpc -from google.cloud.dataproc_v1beta2.proto import jobs_pb2 -from google.cloud.dataproc_v1beta2.proto import jobs_pb2_grpc -from google.cloud.dataproc_v1beta2.proto import operations_pb2 as proto_operations_pb2 -from google.longrunning import operations_pb2 as longrunning_operations_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version - - -class JobControllerClient(object): - """The JobController provides methods to manage jobs.""" - - SERVICE_ADDRESS = "dataproc.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.dataproc.v1beta2.JobController" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - JobControllerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.JobControllerGrpcTransport, - Callable[[~.Credentials, type], ~.JobControllerGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = job_controller_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=job_controller_grpc_transport.JobControllerGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = job_controller_grpc_transport.JobControllerGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def submit_job( - self, - project_id, - region, - job, - request_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Submits a job to a cluster. 
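        Editor's note (hedged, not part of the original docstring): the constructor shown
        earlier in this file also accepts ``client_options`` with an ``api_endpoint`` key;
        the endpoint value below is purely a placeholder.

            >>> from google.cloud import dataproc_v1beta2
            >>> client = dataproc_v1beta2.JobControllerClient(
            ...     client_options={"api_endpoint": "example-endpoint:443"}  # hypothetical endpoint
            ... )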
- - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.JobControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `job`: - >>> job = {} - >>> - >>> response = client.submit_job(project_id, region, job) - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the job - belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. - job (Union[dict, ~google.cloud.dataproc_v1beta2.types.Job]): Required. The job resource. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1beta2.types.Job` - request_id (str): Optional. A unique id used to identify the request. If the server - receives two ``SubmitJobRequest`` requests with the same id, then the - second request will be ignored and the first ``Job`` created and stored - in the backend is returned. - - It is recommended to always set this value to a - `UUID `__. - - The id must contain only letters (a-z, A-Z), numbers (0-9), underscores - (\_), and hyphens (-). The maximum length is 40 characters. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types.Job` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "submit_job" not in self._inner_api_calls: - self._inner_api_calls[ - "submit_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.submit_job, - default_retry=self._method_configs["SubmitJob"].retry, - default_timeout=self._method_configs["SubmitJob"].timeout, - client_info=self._client_info, - ) - - request = jobs_pb2.SubmitJobRequest( - project_id=project_id, region=region, job=job, request_id=request_id - ) - return self._inner_api_calls["submit_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_job( - self, - project_id, - region, - job_id, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the resource representation for a job in a project. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.JobControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `job_id`: - >>> job_id = '' - >>> - >>> response = client.get_job(project_id, region, job_id) - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the job - belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. - job_id (str): Required. The job ID. 
- retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types.Job` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_job" not in self._inner_api_calls: - self._inner_api_calls[ - "get_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_job, - default_retry=self._method_configs["GetJob"].retry, - default_timeout=self._method_configs["GetJob"].timeout, - client_info=self._client_info, - ) - - request = jobs_pb2.GetJobRequest( - project_id=project_id, region=region, job_id=job_id - ) - return self._inner_api_calls["get_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_jobs( - self, - project_id, - region, - page_size=None, - cluster_name=None, - job_state_matcher=None, - filter_=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists regions/{region}/jobs in a project. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.JobControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # Iterate over all results - >>> for element in client.list_jobs(project_id, region): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_jobs(project_id, region).pages: - ... for element in page: - ... # process element - ... pass - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the job - belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - cluster_name (str): Optional. If set, the returned jobs list includes only jobs that were - submitted to the named cluster. - job_state_matcher (~google.cloud.dataproc_v1beta2.types.JobStateMatcher): Optional. Specifies enumerated categories of jobs to list. (default = - match ALL jobs). - - If ``filter`` is provided, ``jobStateMatcher`` will be ignored. - filter_ (str): Optional. A filter constraining the jobs to list. Filters are - case-sensitive and have the following syntax: - - [field = value] AND [field [= value]] ... - - where **field** is ``status.state`` or ``labels.[KEY]``, and ``[KEY]`` - is a label key. **value** can be ``*`` to match all values. - ``status.state`` can be either ``ACTIVE`` or ``NON_ACTIVE``. 
Only the - logical ``AND`` operator is supported; space-separated items are treated - as having an implicit ``AND`` operator. - - Example filter: - - status.state = ACTIVE AND labels.env = staging AND labels.starred = \* - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.dataproc_v1beta2.types.Job` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_jobs" not in self._inner_api_calls: - self._inner_api_calls[ - "list_jobs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_jobs, - default_retry=self._method_configs["ListJobs"].retry, - default_timeout=self._method_configs["ListJobs"].timeout, - client_info=self._client_info, - ) - - request = jobs_pb2.ListJobsRequest( - project_id=project_id, - region=region, - page_size=page_size, - cluster_name=cluster_name, - job_state_matcher=job_state_matcher, - filter=filter_, - ) - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_jobs"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="jobs", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def update_job( - self, - project_id, - region, - job_id, - job, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a job in a project. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.JobControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `job_id`: - >>> job_id = '' - >>> - >>> # TODO: Initialize `job`: - >>> job = {} - >>> - >>> # TODO: Initialize `update_mask`: - >>> update_mask = {} - >>> - >>> response = client.update_job(project_id, region, job_id, job, update_mask) - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the job - belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. - job_id (str): Required. The job ID. - job (Union[dict, ~google.cloud.dataproc_v1beta2.types.Job]): Required. The changes to the job. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1beta2.types.Job` - update_mask (Union[dict, ~google.cloud.dataproc_v1beta2.types.FieldMask]): Required. Specifies the path, relative to Job, of the field to update. 
- For example, to update the labels of a Job the update\_mask parameter - would be specified as labels, and the ``PATCH`` request body would - specify the new value. Note: Currently, labels is the only field that - can be updated. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1beta2.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types.Job` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_job" not in self._inner_api_calls: - self._inner_api_calls[ - "update_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_job, - default_retry=self._method_configs["UpdateJob"].retry, - default_timeout=self._method_configs["UpdateJob"].timeout, - client_info=self._client_info, - ) - - request = jobs_pb2.UpdateJobRequest( - project_id=project_id, - region=region, - job_id=job_id, - job=job, - update_mask=update_mask, - ) - return self._inner_api_calls["update_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def cancel_job( - self, - project_id, - region, - job_id, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Starts a job cancellation request. To access the job resource after - cancellation, call - `regions/{region}/jobs.list `__ - or - `regions/{region}/jobs.get `__. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.JobControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `job_id`: - >>> job_id = '' - >>> - >>> response = client.cancel_job(project_id, region, job_id) - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the job - belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. - job_id (str): Required. The job ID. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types.Job` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. 
- ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "cancel_job" not in self._inner_api_calls: - self._inner_api_calls[ - "cancel_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.cancel_job, - default_retry=self._method_configs["CancelJob"].retry, - default_timeout=self._method_configs["CancelJob"].timeout, - client_info=self._client_info, - ) - - request = jobs_pb2.CancelJobRequest( - project_id=project_id, region=region, job_id=job_id - ) - return self._inner_api_calls["cancel_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_job( - self, - project_id, - region, - job_id, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes the job from the project. If the job is active, the delete - fails, and the response returns ``FAILED_PRECONDITION``. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.JobControllerClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `region`: - >>> region = '' - >>> - >>> # TODO: Initialize `job_id`: - >>> job_id = '' - >>> - >>> client.delete_job(project_id, region, job_id) - - Args: - project_id (str): Required. The ID of the Google Cloud Platform project that the job - belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. - job_id (str): Required. The job ID. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
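# Editor's note (descriptive comment, hedged): the block below repeats the pattern used
# by every RPC wrapper in this client. The raw transport stub is wrapped once with
# google.api_core.gapic_v1.method.wrap_method, which layers on the default retry policy
# and timeout parsed from job_controller_client_config.py, and the wrapped callable is
# cached in self._inner_api_calls so subsequent delete_job calls reuse it.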
- if "delete_job" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_job, - default_retry=self._method_configs["DeleteJob"].retry, - default_timeout=self._method_configs["DeleteJob"].timeout, - client_info=self._client_info, - ) - - request = jobs_pb2.DeleteJobRequest( - project_id=project_id, region=region, job_id=job_id - ) - self._inner_api_calls["delete_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/job_controller_client_config.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/job_controller_client_config.py deleted file mode 100644 index 9bacf1251078..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/job_controller_client_config.py +++ /dev/null @@ -1,53 +0,0 @@ -config = { - "interfaces": { - "google.cloud.dataproc.v1beta2.JobController": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": ["UNAVAILABLE"], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 30000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 30000, - "total_timeout_millis": 900000, - } - }, - "methods": { - "SubmitJob": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetJob": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListJobs": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "UpdateJob": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "CancelJob": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteJob": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/__init__.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py deleted file mode 100644 index 98651adee77d..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py +++ /dev/null @@ -1,179 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import google.api_core.grpc_helpers - -from google.cloud.dataproc_v1beta2.proto import autoscaling_policies_pb2_grpc - - -class AutoscalingPolicyServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.dataproc.v1beta2 AutoscalingPolicyService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, channel=None, credentials=None, address="dataproc.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "autoscaling_policy_service_stub": autoscaling_policies_pb2_grpc.AutoscalingPolicyServiceStub( - channel - ) - } - - @classmethod - def create_channel( - cls, address="dataproc.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_autoscaling_policy(self): - """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.create_autoscaling_policy`. - - Creates new autoscaling policy. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["autoscaling_policy_service_stub"].CreateAutoscalingPolicy - - @property - def update_autoscaling_policy(self): - """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.update_autoscaling_policy`. - - Updates (replaces) autoscaling policy. 
- - Disabled check for update\_mask, because all updates will be full - replacements. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["autoscaling_policy_service_stub"].UpdateAutoscalingPolicy - - @property - def get_autoscaling_policy(self): - """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.get_autoscaling_policy`. - - Retrieves autoscaling policy. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["autoscaling_policy_service_stub"].GetAutoscalingPolicy - - @property - def list_autoscaling_policies(self): - """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.list_autoscaling_policies`. - - Lists autoscaling policies in the project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["autoscaling_policy_service_stub"].ListAutoscalingPolicies - - @property - def delete_autoscaling_policy(self): - """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.delete_autoscaling_policy`. - - Deletes an autoscaling policy. It is an error to delete an autoscaling - policy that is in use by one or more clusters. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["autoscaling_policy_service_stub"].DeleteAutoscalingPolicy diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py deleted file mode 100644 index 767268e536bc..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py +++ /dev/null @@ -1,204 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers -import google.api_core.operations_v1 - -from google.cloud.dataproc_v1beta2.proto import clusters_pb2_grpc - - -class ClusterControllerGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.dataproc.v1beta2 ClusterController API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, channel=None, credentials=None, address="dataproc.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. 
This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "cluster_controller_stub": clusters_pb2_grpc.ClusterControllerStub(channel) - } - - # Because this API includes a method that returns a - # long-running operation (proto: google.longrunning.Operation), - # instantiate an LRO client. - self._operations_client = google.api_core.operations_v1.OperationsClient( - channel - ) - - @classmethod - def create_channel( - cls, address="dataproc.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_cluster(self): - """Return the gRPC stub for :meth:`ClusterControllerClient.create_cluster`. - - Creates a cluster in a project. The returned ``Operation.metadata`` will - be - `ClusterOperationMetadata `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cluster_controller_stub"].CreateCluster - - @property - def update_cluster(self): - """Return the gRPC stub for :meth:`ClusterControllerClient.update_cluster`. - - Updates a cluster in a project. The returned ``Operation.metadata`` will - be - `ClusterOperationMetadata `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cluster_controller_stub"].UpdateCluster - - @property - def delete_cluster(self): - """Return the gRPC stub for :meth:`ClusterControllerClient.delete_cluster`. - - Deletes a cluster in a project. The returned ``Operation.metadata`` will - be - `ClusterOperationMetadata `__. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cluster_controller_stub"].DeleteCluster - - @property - def get_cluster(self): - """Return the gRPC stub for :meth:`ClusterControllerClient.get_cluster`. - - Gets the resource representation for a cluster in a project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cluster_controller_stub"].GetCluster - - @property - def list_clusters(self): - """Return the gRPC stub for :meth:`ClusterControllerClient.list_clusters`. - - Lists all regions/{region}/clusters in a project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cluster_controller_stub"].ListClusters - - @property - def diagnose_cluster(self): - """Return the gRPC stub for :meth:`ClusterControllerClient.diagnose_cluster`. - - Gets cluster diagnostic information. The returned ``Operation.metadata`` - will be - `ClusterOperationMetadata `__. - After the operation completes, ``Operation.response`` contains - `Empty `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cluster_controller_stub"].DiagnoseCluster diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py deleted file mode 100644 index 33bcf39e1728..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py +++ /dev/null @@ -1,189 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.dataproc_v1beta2.proto import jobs_pb2_grpc - - -class JobControllerGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.dataproc.v1beta2 JobController API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, channel=None, credentials=None, address="dataproc.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = {"job_controller_stub": jobs_pb2_grpc.JobControllerStub(channel)} - - @classmethod - def create_channel( - cls, address="dataproc.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def submit_job(self): - """Return the gRPC stub for :meth:`JobControllerClient.submit_job`. - - Submits a job to a cluster. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["job_controller_stub"].SubmitJob - - @property - def get_job(self): - """Return the gRPC stub for :meth:`JobControllerClient.get_job`. - - Gets the resource representation for a job in a project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["job_controller_stub"].GetJob - - @property - def list_jobs(self): - """Return the gRPC stub for :meth:`JobControllerClient.list_jobs`. - - Lists regions/{region}/jobs in a project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["job_controller_stub"].ListJobs - - @property - def update_job(self): - """Return the gRPC stub for :meth:`JobControllerClient.update_job`. - - Updates a job in a project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["job_controller_stub"].UpdateJob - - @property - def cancel_job(self): - """Return the gRPC stub for :meth:`JobControllerClient.cancel_job`. - - Starts a job cancellation request. To access the job resource after - cancellation, call - `regions/{region}/jobs.list `__ - or - `regions/{region}/jobs.get `__. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["job_controller_stub"].CancelJob - - @property - def delete_job(self): - """Return the gRPC stub for :meth:`JobControllerClient.delete_job`. - - Deletes the job from the project. If the job is active, the delete - fails, and the response returns ``FAILED_PRECONDITION``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["job_controller_stub"].DeleteJob diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py deleted file mode 100644 index 39b8c85e8d7d..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py +++ /dev/null @@ -1,249 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers -import google.api_core.operations_v1 - -from google.cloud.dataproc_v1beta2.proto import workflow_templates_pb2_grpc - - -class WorkflowTemplateServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.dataproc.v1beta2 WorkflowTemplateService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, channel=None, credentials=None, address="dataproc.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. 
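# Editor's note (descriptive comment, hedged): as in the other transports in this diff,
# the channel is created with "grpc.max_send_message_length" and
# "grpc.max_receive_message_length" set to -1, which lifts gRPC's default message-size
# caps (notably the ~4 MB receive limit) so large payloads are not rejected.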
- if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "workflow_template_service_stub": workflow_templates_pb2_grpc.WorkflowTemplateServiceStub( - channel - ) - } - - # Because this API includes a method that returns a - # long-running operation (proto: google.longrunning.Operation), - # instantiate an LRO client. - self._operations_client = google.api_core.operations_v1.OperationsClient( - channel - ) - - @classmethod - def create_channel( - cls, address="dataproc.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_workflow_template(self): - """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.create_workflow_template`. - - Creates new workflow template. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["workflow_template_service_stub"].CreateWorkflowTemplate - - @property - def get_workflow_template(self): - """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.get_workflow_template`. - - Retrieves the latest workflow template. - - Can retrieve previously instantiated template by specifying optional - version parameter. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["workflow_template_service_stub"].GetWorkflowTemplate - - @property - def instantiate_workflow_template(self): - """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.instantiate_workflow_template`. - - Instantiates a template and begins execution. - - The returned Operation can be used to track execution of workflow by - polling ``operations.get``. The Operation will complete when entire - workflow is finished. - - The running workflow can be aborted via ``operations.cancel``. This will - cause any inflight jobs to be cancelled and workflow-owned clusters to - be deleted. - - The ``Operation.metadata`` will be - `WorkflowMetadata `__. - Also see `Using - WorkflowMetadata `__. - - On successful completion, ``Operation.response`` will be ``Empty``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["workflow_template_service_stub"].InstantiateWorkflowTemplate - - @property - def instantiate_inline_workflow_template(self): - """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.instantiate_inline_workflow_template`. - - Instantiates a template and begins execution. - - This method is equivalent to executing the sequence - ``CreateWorkflowTemplate``, ``InstantiateWorkflowTemplate``, - ``DeleteWorkflowTemplate``. - - The returned Operation can be used to track execution of workflow by - polling ``operations.get``. The Operation will complete when entire - workflow is finished. - - The running workflow can be aborted via ``operations.cancel``. This will - cause any inflight jobs to be cancelled and workflow-owned clusters to - be deleted. - - The ``Operation.metadata`` will be - `WorkflowMetadata `__. - Also see `Using - WorkflowMetadata `__. - - On successful completion, ``Operation.response`` will be ``Empty``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs[ - "workflow_template_service_stub" - ].InstantiateInlineWorkflowTemplate - - @property - def update_workflow_template(self): - """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.update_workflow_template`. - - Updates (replaces) workflow template. The updated template - must contain version that matches the current server version. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["workflow_template_service_stub"].UpdateWorkflowTemplate - - @property - def list_workflow_templates(self): - """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.list_workflow_templates`. - - Lists workflows that match the specified filter in the request. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["workflow_template_service_stub"].ListWorkflowTemplates - - @property - def delete_workflow_template(self): - """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.delete_workflow_template`. - - Deletes a workflow template. It does not cancel in-progress workflows. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["workflow_template_service_stub"].DeleteWorkflowTemplate diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py deleted file mode 100644 index 5319e2f1f15c..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py +++ /dev/null @@ -1,946 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.cloud.dataproc.v1beta2 WorkflowTemplateService API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.operation -import google.api_core.operations_v1 -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.cloud.dataproc_v1beta2.gapic import enums -from google.cloud.dataproc_v1beta2.gapic import workflow_template_service_client_config -from google.cloud.dataproc_v1beta2.gapic.transports import ( - workflow_template_service_grpc_transport, -) -from google.cloud.dataproc_v1beta2.proto import autoscaling_policies_pb2 -from google.cloud.dataproc_v1beta2.proto import autoscaling_policies_pb2_grpc -from google.cloud.dataproc_v1beta2.proto import clusters_pb2 -from google.cloud.dataproc_v1beta2.proto import clusters_pb2_grpc -from google.cloud.dataproc_v1beta2.proto import jobs_pb2 -from google.cloud.dataproc_v1beta2.proto import jobs_pb2_grpc -from google.cloud.dataproc_v1beta2.proto import operations_pb2 as proto_operations_pb2 -from google.cloud.dataproc_v1beta2.proto import workflow_templates_pb2 -from google.cloud.dataproc_v1beta2.proto import workflow_templates_pb2_grpc -from google.longrunning import operations_pb2 as longrunning_operations_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version - - -class WorkflowTemplateServiceClient(object): - """ - The API interface for managing Workflow Templates in the - Cloud Dataproc API. - """ - - SERVICE_ADDRESS = "dataproc.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.dataproc.v1beta2.WorkflowTemplateService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - WorkflowTemplateServiceClient: The constructed client. 
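        Editor's note (hedged addition): a usage sketch in the style of the other
        examples in this file; the key file path is purely a placeholder.

            >>> from google.cloud import dataproc_v1beta2
            >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient.from_service_account_file(
            ...     'service-account.json')  # hypothetical path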
- """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def region_path(cls, project, region): - """Return a fully-qualified region string.""" - return google.api_core.path_template.expand( - "projects/{project}/regions/{region}", project=project, region=region - ) - - @classmethod - def workflow_template_path(cls, project, region, workflow_template): - """Return a fully-qualified workflow_template string.""" - return google.api_core.path_template.expand( - "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}", - project=project, - region=region, - workflow_template=workflow_template, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.WorkflowTemplateServiceGrpcTransport, - Callable[[~.Credentials, type], ~.WorkflowTemplateServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = workflow_template_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. 
- if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=workflow_template_service_grpc_transport.WorkflowTemplateServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = workflow_template_service_grpc_transport.WorkflowTemplateServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_workflow_template( - self, - parent, - template, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates new workflow template. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() - >>> - >>> parent = client.region_path('[PROJECT]', '[REGION]') - >>> - >>> # TODO: Initialize `template`: - >>> template = {} - >>> - >>> response = client.create_workflow_template(parent, template) - - Args: - parent (str): Required. The resource name of the region or location, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.workflowTemplates,create``, the resource name - of the region has the following format: - ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.workflowTemplates.create``, the resource - name of the location has the following format: - ``projects/{project_id}/locations/{location}`` - template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The Dataproc workflow template to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. 
- """ - # Wrap the transport method to add retry and timeout logic. - if "create_workflow_template" not in self._inner_api_calls: - self._inner_api_calls[ - "create_workflow_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_workflow_template, - default_retry=self._method_configs["CreateWorkflowTemplate"].retry, - default_timeout=self._method_configs["CreateWorkflowTemplate"].timeout, - client_info=self._client_info, - ) - - request = workflow_templates_pb2.CreateWorkflowTemplateRequest( - parent=parent, template=template - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_workflow_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_workflow_template( - self, - name, - version=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Retrieves the latest workflow template. - - Can retrieve previously instantiated template by specifying optional - version parameter. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() - >>> - >>> name = client.workflow_template_path('[PROJECT]', '[REGION]', '[WORKFLOW_TEMPLATE]') - >>> - >>> response = client.get_workflow_template(name) - - Args: - name (str): Required. The resource name of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.workflowTemplates.get``, the resource name of - the template has the following format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For ``projects.locations.workflowTemplates.get``, the resource name - of the template has the following format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - version (int): Optional. The version of workflow template to retrieve. Only previously - instantiated versions can be retrieved. - - If unspecified, retrieves the current version. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_workflow_template" not in self._inner_api_calls: - self._inner_api_calls[ - "get_workflow_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_workflow_template, - default_retry=self._method_configs["GetWorkflowTemplate"].retry, - default_timeout=self._method_configs["GetWorkflowTemplate"].timeout, - client_info=self._client_info, - ) - - request = workflow_templates_pb2.GetWorkflowTemplateRequest( - name=name, version=version - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_workflow_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def instantiate_workflow_template( - self, - name, - version=None, - instance_id=None, - request_id=None, - parameters=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Instantiates a template and begins execution. - - The returned Operation can be used to track execution of workflow by - polling ``operations.get``. The Operation will complete when entire - workflow is finished. - - The running workflow can be aborted via ``operations.cancel``. This will - cause any inflight jobs to be cancelled and workflow-owned clusters to - be deleted. - - The ``Operation.metadata`` will be - `WorkflowMetadata `__. - Also see `Using - WorkflowMetadata `__. - - On successful completion, ``Operation.response`` will be ``Empty``. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() - >>> - >>> name = client.workflow_template_path('[PROJECT]', '[REGION]', '[WORKFLOW_TEMPLATE]') - >>> - >>> response = client.instantiate_workflow_template(name) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - name (str): Required. The resource name of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.workflowTemplates.instantiate``, the resource - name of the template has the following format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For ``projects.locations.workflowTemplates.instantiate``, the - resource name of the template has the following format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - version (int): Optional. The version of workflow template to instantiate. If specified, - the workflow will be instantiated only if the current version of - the workflow template has the supplied version. - - This option cannot be used to instantiate a previous version of - workflow template. - instance_id (str): Deprecated. Please use ``request_id`` field instead. - request_id (str): Optional. A tag that prevents multiple concurrent workflow instances - with the same tag from running. This mitigates risk of concurrent - instances started due to retries. - - It is recommended to always set this value to a - `UUID `__. - - The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores - (\_), and hyphens (-). The maximum length is 40 characters. 
- parameters (dict[str -> str]): Optional. Map from parameter names to values that should be used for those - parameters. Values may not exceed 100 characters. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "instantiate_workflow_template" not in self._inner_api_calls: - self._inner_api_calls[ - "instantiate_workflow_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.instantiate_workflow_template, - default_retry=self._method_configs["InstantiateWorkflowTemplate"].retry, - default_timeout=self._method_configs[ - "InstantiateWorkflowTemplate" - ].timeout, - client_info=self._client_info, - ) - - request = workflow_templates_pb2.InstantiateWorkflowTemplateRequest( - name=name, - version=version, - instance_id=instance_id, - request_id=request_id, - parameters=parameters, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["instantiate_workflow_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=workflow_templates_pb2.WorkflowMetadata, - ) - - def instantiate_inline_workflow_template( - self, - parent, - template, - instance_id=None, - request_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Instantiates a template and begins execution. - - This method is equivalent to executing the sequence - ``CreateWorkflowTemplate``, ``InstantiateWorkflowTemplate``, - ``DeleteWorkflowTemplate``. - - The returned Operation can be used to track execution of workflow by - polling ``operations.get``. The Operation will complete when entire - workflow is finished. - - The running workflow can be aborted via ``operations.cancel``. This will - cause any inflight jobs to be cancelled and workflow-owned clusters to - be deleted. - - The ``Operation.metadata`` will be - `WorkflowMetadata `__. - Also see `Using - WorkflowMetadata `__. - - On successful completion, ``Operation.response`` will be ``Empty``. 
- - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() - >>> - >>> parent = client.region_path('[PROJECT]', '[REGION]') - >>> - >>> # TODO: Initialize `template`: - >>> template = {} - >>> - >>> response = client.instantiate_inline_workflow_template(parent, template) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - parent (str): Required. The resource name of the region or location, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.workflowTemplates,instantiateinline``, the - resource name of the region has the following format: - ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.workflowTemplates.instantiateinline``, the - resource name of the location has the following format: - ``projects/{project_id}/locations/{location}`` - template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The workflow template to instantiate. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` - instance_id (str): Deprecated. Please use ``request_id`` field instead. - request_id (str): Optional. A tag that prevents multiple concurrent workflow instances - with the same tag from running. This mitigates risk of concurrent - instances started due to retries. - - It is recommended to always set this value to a - `UUID `__. - - The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores - (\_), and hyphens (-). The maximum length is 40 characters. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
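Both instantiate methods above return a long-running operation future. A sketch of the blocking pattern, assuming the template declares an INPUT_URI parameter; the names and the request_id tag are placeholders:

    from google.cloud import dataproc_v1beta2

    client = dataproc_v1beta2.WorkflowTemplateServiceClient()
    name = client.workflow_template_path("my-project", "us-central1", "sample-template")

    operation = client.instantiate_workflow_template(
        name,
        request_id="run-20200206-0001",  # idempotency tag, letters/digits/_/-, <= 40 chars
        parameters={"INPUT_URI": "gs://my-bucket/input"},
    )

    # Block until the whole workflow finishes. Operation.response is Empty,
    # so the useful information lives in the WorkflowMetadata.
    operation.result()
    print(operation.metadata)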
- if "instantiate_inline_workflow_template" not in self._inner_api_calls: - self._inner_api_calls[ - "instantiate_inline_workflow_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.instantiate_inline_workflow_template, - default_retry=self._method_configs[ - "InstantiateInlineWorkflowTemplate" - ].retry, - default_timeout=self._method_configs[ - "InstantiateInlineWorkflowTemplate" - ].timeout, - client_info=self._client_info, - ) - - request = workflow_templates_pb2.InstantiateInlineWorkflowTemplateRequest( - parent=parent, - template=template, - instance_id=instance_id, - request_id=request_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["instantiate_inline_workflow_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=workflow_templates_pb2.WorkflowMetadata, - ) - - def update_workflow_template( - self, - template, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates (replaces) workflow template. The updated template - must contain version that matches the current server version. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() - >>> - >>> # TODO: Initialize `template`: - >>> template = {} - >>> - >>> response = client.update_workflow_template(template) - - Args: - template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The updated workflow template. - - The ``template.version`` field must match the current version. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_workflow_template" not in self._inner_api_calls: - self._inner_api_calls[ - "update_workflow_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_workflow_template, - default_retry=self._method_configs["UpdateWorkflowTemplate"].retry, - default_timeout=self._method_configs["UpdateWorkflowTemplate"].timeout, - client_info=self._client_info, - ) - - request = workflow_templates_pb2.UpdateWorkflowTemplateRequest( - template=template - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("template.name", template.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_workflow_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_workflow_templates( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists workflows that match the specified filter in the request. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() - >>> - >>> parent = client.region_path('[PROJECT]', '[REGION]') - >>> - >>> # Iterate over all results - >>> for element in client.list_workflow_templates(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_workflow_templates(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The resource name of the region or location, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.workflowTemplates,list``, the resource name of - the region has the following format: - ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.workflowTemplates.list``, the resource name - of the location has the following format: - ``projects/{project_id}/locations/{location}`` - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_workflow_templates" not in self._inner_api_calls: - self._inner_api_calls[ - "list_workflow_templates" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_workflow_templates, - default_retry=self._method_configs["ListWorkflowTemplates"].retry, - default_timeout=self._method_configs["ListWorkflowTemplates"].timeout, - client_info=self._client_info, - ) - - request = workflow_templates_pb2.ListWorkflowTemplatesRequest( - parent=parent, page_size=page_size - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_workflow_templates"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="templates", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_workflow_template( - self, - name, - version=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a workflow template. It does not cancel in-progress workflows. - - Example: - >>> from google.cloud import dataproc_v1beta2 - >>> - >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() - >>> - >>> name = client.workflow_template_path('[PROJECT]', '[REGION]', '[WORKFLOW_TEMPLATE]') - >>> - >>> client.delete_workflow_template(name) - - Args: - name (str): Required. The resource name of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names. - - - For ``projects.regions.workflowTemplates.delete``, the resource name - of the template has the following format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For ``projects.locations.workflowTemplates.instantiate``, the - resource name of the template has the following format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - version (int): Optional. The version of workflow template to delete. If specified, - will only delete the template if the current server version matches - specified version. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_workflow_template" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_workflow_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_workflow_template, - default_retry=self._method_configs["DeleteWorkflowTemplate"].retry, - default_timeout=self._method_configs["DeleteWorkflowTemplate"].timeout, - client_info=self._client_info, - ) - - request = workflow_templates_pb2.DeleteWorkflowTemplateRequest( - name=name, version=version - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_workflow_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client_config.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client_config.py deleted file mode 100644 index 75aad0b5f8bf..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client_config.py +++ /dev/null @@ -1,58 +0,0 @@ -config = { - "interfaces": { - "google.cloud.dataproc.v1beta2.WorkflowTemplateService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": ["UNAVAILABLE"], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "CreateWorkflowTemplate": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetWorkflowTemplate": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "InstantiateWorkflowTemplate": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "InstantiateInlineWorkflowTemplate": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateWorkflowTemplate": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "ListWorkflowTemplates": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteWorkflowTemplate": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/__init__.py b/dataproc/google/cloud/dataproc_v1beta2/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto b/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto deleted file mode 100644 index 36d507c82638..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto +++ /dev/null @@ -1,351 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.dataproc.v1beta2; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "AutoscalingPoliciesProto"; -option java_package = "com.google.cloud.dataproc.v1beta2"; - -// The API interface for managing autoscaling policies in the -// Google Cloud Dataproc API. -service AutoscalingPolicyService { - option (google.api.default_host) = "dataproc.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates new autoscaling policy. - rpc CreateAutoscalingPolicy(CreateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { - option (google.api.http) = { - post: "/v1beta2/{parent=projects/*/locations/*}/autoscalingPolicies" - body: "policy" - additional_bindings { - post: "/v1beta2/{parent=projects/*/regions/*}/autoscalingPolicies" - body: "policy" - } - }; - option (google.api.method_signature) = "parent,policy"; - } - - // Updates (replaces) autoscaling policy. - // - // Disabled check for update_mask, because all updates will be full - // replacements. - rpc UpdateAutoscalingPolicy(UpdateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { - option (google.api.http) = { - put: "/v1beta2/{policy.name=projects/*/locations/*/autoscalingPolicies/*}" - body: "policy" - additional_bindings { - put: "/v1beta2/{policy.name=projects/*/regions/*/autoscalingPolicies/*}" - body: "policy" - } - }; - option (google.api.method_signature) = "policy"; - } - - // Retrieves autoscaling policy. - rpc GetAutoscalingPolicy(GetAutoscalingPolicyRequest) returns (AutoscalingPolicy) { - option (google.api.http) = { - get: "/v1beta2/{name=projects/*/locations/*/autoscalingPolicies/*}" - additional_bindings { - get: "/v1beta2/{name=projects/*/regions/*/autoscalingPolicies/*}" - } - }; - option (google.api.method_signature) = "name"; - } - - // Lists autoscaling policies in the project. - rpc ListAutoscalingPolicies(ListAutoscalingPoliciesRequest) returns (ListAutoscalingPoliciesResponse) { - option (google.api.http) = { - get: "/v1beta2/{parent=projects/*/locations/*}/autoscalingPolicies" - additional_bindings { - get: "/v1beta2/{parent=projects/*/regions/*}/autoscalingPolicies" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes an autoscaling policy. It is an error to delete an autoscaling - // policy that is in use by one or more clusters. 
- rpc DeleteAutoscalingPolicy(DeleteAutoscalingPolicyRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1beta2/{name=projects/*/locations/*/autoscalingPolicies/*}" - additional_bindings { - delete: "/v1beta2/{name=projects/*/regions/*/autoscalingPolicies/*}" - } - }; - option (google.api.method_signature) = "name"; - } -} - -// Describes an autoscaling policy for Dataproc cluster autoscaler. -message AutoscalingPolicy { - option (google.api.resource) = { - type: "dataproc.googleapis.com/AutoscalingPolicy" - pattern: "projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}" - pattern: "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}" - history: ORIGINALLY_SINGLE_PATTERN - }; - - // Required. The policy id. - // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). Cannot begin or end with underscore - // or hyphen. Must consist of between 3 and 50 characters. - string id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Output only. The "resource name" of the autoscaling policy, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.autoscalingPolicies`, the resource name of the - // policy has the following format: - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - // - // * For `projects.locations.autoscalingPolicies`, the resource name of the - // policy has the following format: - // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` - string name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Required. Autoscaling algorithm for policy. - oneof algorithm { - BasicAutoscalingAlgorithm basic_algorithm = 3; - } - - // Required. Describes how the autoscaler will operate for primary workers. - InstanceGroupAutoscalingPolicyConfig worker_config = 4 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Describes how the autoscaler will operate for secondary workers. - InstanceGroupAutoscalingPolicyConfig secondary_worker_config = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Basic algorithm for autoscaling. -message BasicAutoscalingAlgorithm { - // Required. YARN autoscaling configuration. - BasicYarnAutoscalingConfig yarn_config = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Duration between scaling events. A scaling period starts after - // the update operation from the previous event has completed. - // - // Bounds: [2m, 1d]. Default: 2m. - google.protobuf.Duration cooldown_period = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// Basic autoscaling configurations for YARN. -message BasicYarnAutoscalingConfig { - // Required. Timeout for YARN graceful decommissioning of Node Managers. - // Specifies the duration to wait for jobs to complete before forcefully - // removing workers (and potentially interrupting jobs). Only applicable to - // downscaling operations. - // - // Bounds: [0s, 1d]. - google.protobuf.Duration graceful_decommission_timeout = 5 [(google.api.field_behavior) = REQUIRED]; - - // Required. Fraction of average pending memory in the last cooldown period - // for which to add workers. A scale-up factor of 1.0 will result in scaling - // up so that there is no pending memory remaining after the update (more - // aggressive scaling). A scale-up factor closer to 0 will result in a smaller - // magnitude of scaling up (less aggressive scaling). - // - // Bounds: [0.0, 1.0]. 
- double scale_up_factor = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. Fraction of average pending memory in the last cooldown period - // for which to remove workers. A scale-down factor of 1 will result in - // scaling down so that there is no available memory remaining after the - // update (more aggressive scaling). A scale-down factor of 0 disables - // removing workers, which can be beneficial for autoscaling a single job. - // - // Bounds: [0.0, 1.0]. - double scale_down_factor = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Minimum scale-up threshold as a fraction of total cluster size - // before scaling occurs. For example, in a 20-worker cluster, a threshold of - // 0.1 means the autoscaler must recommend at least a 2-worker scale-up for - // the cluster to scale. A threshold of 0 means the autoscaler will scale up - // on any recommended change. - // - // Bounds: [0.0, 1.0]. Default: 0.0. - double scale_up_min_worker_fraction = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Minimum scale-down threshold as a fraction of total cluster size - // before scaling occurs. For example, in a 20-worker cluster, a threshold of - // 0.1 means the autoscaler must recommend at least a 2 worker scale-down for - // the cluster to scale. A threshold of 0 means the autoscaler will scale down - // on any recommended change. - // - // Bounds: [0.0, 1.0]. Default: 0.0. - double scale_down_min_worker_fraction = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// Configuration for the size bounds of an instance group, including its -// proportional size to other groups. -message InstanceGroupAutoscalingPolicyConfig { - // Optional. Minimum number of instances for this group. - // - // Primary workers - Bounds: [2, max_instances]. Default: 2. - // Secondary workers - Bounds: [0, max_instances]. Default: 0. - int32 min_instances = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Maximum number of instances for this group. Required for primary - // workers. Note that by default, clusters will not use secondary workers. - // Required for secondary workers if the minimum secondary instances is set. - // - // Primary workers - Bounds: [min_instances, ). Required. - // Secondary workers - Bounds: [min_instances, ). Default: 0. - int32 max_instances = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Weight for the instance group, which is used to determine the - // fraction of total workers in the cluster from this instance group. - // For example, if primary workers have weight 2, and secondary workers have - // weight 1, the cluster will have approximately 2 primary workers for each - // secondary worker. - // - // The cluster may not reach the specified balance if constrained - // by min/max bounds or other autoscaling settings. For example, if - // `max_instances` for secondary workers is 0, then only primary workers will - // be added. The cluster can also be out of balance when created. - // - // If weight is not set on any instance group, the cluster will default to - // equal weight for all groups: the cluster will attempt to maintain an equal - // number of workers in each group within the configured size bounds for each - // group. If weight is set for one group only, the cluster will default to - // zero weight on the unset group. For example if weight is set only on - // primary workers, the cluster will use primary workers only and no - // secondary workers. 
- int32 weight = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to create an autoscaling policy. -message CreateAutoscalingPolicyRequest { - // Required. The "resource name" of the region or location, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.autoscalingPolicies.create`, the resource name - // has the following format: - // `projects/{project_id}/regions/{region}` - // - // * For `projects.locations.autoscalingPolicies.create`, the resource name - // has the following format: - // `projects/{project_id}/locations/{location}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/AutoscalingPolicy" - } - ]; - - // Required. The autoscaling policy to create. - AutoscalingPolicy policy = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to fetch an autoscaling policy. -message GetAutoscalingPolicyRequest { - // Required. The "resource name" of the autoscaling policy, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.autoscalingPolicies.get`, the resource name - // of the policy has the following format: - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - // - // * For `projects.locations.autoscalingPolicies.get`, the resource name - // of the policy has the following format: - // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/AutoscalingPolicy" - } - ]; -} - -// A request to update an autoscaling policy. -message UpdateAutoscalingPolicyRequest { - // Required. The updated autoscaling policy. - AutoscalingPolicy policy = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/AutoscalingPolicy" - } - ]; -} - -// A request to delete an autoscaling policy. -// -// Autoscaling policies in use by one or more clusters will not be deleted. -message DeleteAutoscalingPolicyRequest { - // Required. The "resource name" of the autoscaling policy, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.autoscalingPolicies.delete`, the resource name - // of the policy has the following format: - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - // - // * For `projects.locations.autoscalingPolicies.delete`, the resource name - // of the policy has the following format: - // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/AutoscalingPolicy" - } - ]; -} - -// A request to list autoscaling policies in a project. -message ListAutoscalingPoliciesRequest { - // Required. The "resource name" of the region or location, as described - // in https://cloud.google.com/apis/design/resource_names. 
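A hypothetical Python sketch that exercises the policy messages defined above, assuming the companion AutoscalingPolicyServiceClient removed elsewhere in this change accepts create_autoscaling_policy(parent, policy) with a dict policy; the id, bounds, and weights are placeholders:

    from google.cloud import dataproc_v1beta2

    client = dataproc_v1beta2.AutoscalingPolicyServiceClient()
    parent = "projects/my-project/regions/us-central1"

    policy = {
        "id": "sample-policy",
        "basic_algorithm": {
            "yarn_config": {
                "graceful_decommission_timeout": {"seconds": 600},
                "scale_up_factor": 0.5,
                "scale_down_factor": 0.5,
            },
            "cooldown_period": {"seconds": 120},
        },
        "worker_config": {"min_instances": 2, "max_instances": 20, "weight": 2},
        "secondary_worker_config": {"min_instances": 0, "max_instances": 50, "weight": 1},
    }

    created = client.create_autoscaling_policy(parent, policy)
    print(created.name)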
- // - // * For `projects.regions.autoscalingPolicies.list`, the resource name - // of the region has the following format: - // `projects/{project_id}/regions/{region}` - // - // * For `projects.locations.autoscalingPolicies.list`, the resource name - // of the location has the following format: - // `projects/{project_id}/locations/{location}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/AutoscalingPolicy" - } - ]; - - // Optional. The maximum number of results to return in each response. - // Must be less than or equal to 1000. Defaults to 100. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The page token, returned by a previous call, to request the - // next page of results. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// A response to a request to list autoscaling policies in a project. -message ListAutoscalingPoliciesResponse { - // Output only. Autoscaling policies list. - repeated AutoscalingPolicy policies = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. This token is included in the response if there are more - // results to fetch. - string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py deleted file mode 100644 index 7c3be0284cae..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py +++ /dev/null @@ -1,1212 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto", - package="google.cloud.dataproc.v1beta2", - syntax="proto3", - serialized_options=_b( - "\n!com.google.cloud.dataproc.v1beta2B\030AutoscalingPoliciesProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" - ), - serialized_pb=_b( - '\n>google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto"\xb9\x04\n\x11\x41utoscalingPolicy\x12\x0f\n\x02id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04name\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12S\n\x0f\x62\x61sic_algorithm\x18\x03 
\x01(\x0b\x32\x38.google.cloud.dataproc.v1beta2.BasicAutoscalingAlgorithmH\x00\x12_\n\rworker_config\x18\x04 \x01(\x0b\x32\x43.google.cloud.dataproc.v1beta2.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x02\x12i\n\x17secondary_worker_config\x18\x05 \x01(\x0b\x32\x43.google.cloud.dataproc.v1beta2.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x01:\xd1\x01\xea\x41\xcd\x01\n)dataproc.googleapis.com/AutoscalingPolicy\x12Lprojects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}\x12Pprojects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy} \x01\x42\x0b\n\talgorithm"\xa9\x01\n\x19\x42\x61sicAutoscalingAlgorithm\x12S\n\x0byarn_config\x18\x01 \x01(\x0b\x32\x39.google.cloud.dataproc.v1beta2.BasicYarnAutoscalingConfigB\x03\xe0\x41\x02\x12\x37\n\x0f\x63ooldown_period\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\xf9\x01\n\x1a\x42\x61sicYarnAutoscalingConfig\x12\x45\n\x1dgraceful_decommission_timeout\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12\x1c\n\x0fscale_up_factor\x18\x01 \x01(\x01\x42\x03\xe0\x41\x02\x12\x1e\n\x11scale_down_factor\x18\x02 \x01(\x01\x42\x03\xe0\x41\x02\x12)\n\x1cscale_up_min_worker_fraction\x18\x03 \x01(\x01\x42\x03\xe0\x41\x01\x12+\n\x1escale_down_min_worker_fraction\x18\x04 \x01(\x01\x42\x03\xe0\x41\x01"s\n$InstanceGroupAutoscalingPolicyConfig\x12\x1a\n\rmin_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1a\n\rmax_instances\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x13\n\x06weight\x18\x03 \x01(\x05\x42\x03\xe0\x41\x01"\xaa\x01\n\x1e\x43reateAutoscalingPolicyRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12\x45\n\x06policy\x18\x02 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicyB\x03\xe0\x41\x02"^\n\x1bGetAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"\x95\x01\n\x1eUpdateAutoscalingPolicyRequest\x12s\n\x06policy\x18\x01 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicyB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"a\n\x1e\x44\x65leteAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"\x94\x01\n\x1eListAutoscalingPoliciesRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x88\x01\n\x1fListAutoscalingPoliciesResponse\x12G\n\x08policies\x18\x01 \x03(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicyB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03\x32\x8f\x0c\n\x18\x41utoscalingPolicyService\x12\xb0\x02\n\x17\x43reateAutoscalingPolicy\x12=.google.cloud.dataproc.v1beta2.CreateAutoscalingPolicyRequest\x1a\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicy"\xa3\x01\x82\xd3\xe4\x93\x02\x8c\x01".google.cloud.dataproc.v1beta2.ListAutoscalingPoliciesResponse"\x8b\x01\x82\xd3\xe4\x93\x02|\x12 labels = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. Cluster status. - ClusterStatus status = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The previous cluster status. - repeated ClusterStatus status_history = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. A cluster UUID (Unique Universal Identifier). 
Cloud Dataproc - // generates this value when it creates the cluster. - string cluster_uuid = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Contains cluster daemon metrics such as HDFS and YARN stats. - // - // **Beta Feature**: This report is available for testing purposes only. It - // may be changed before final release. - ClusterMetrics metrics = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// The cluster config. -message ClusterConfig { - // Optional. A Google Cloud Storage bucket used to stage job - // dependencies, config files, and job driver console output. - // If you do not specify a staging bucket, Cloud - // Dataproc will determine a Cloud Storage location (US, - // ASIA, or EU) for your cluster's staging bucket according to the Google - // Compute Engine zone where your cluster is deployed, and then create - // and manage this project-level, per-location bucket (see - // [Cloud Dataproc staging - // bucket](/dataproc/docs/concepts/configuring-clusters/staging-bucket)). - string config_bucket = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The shared Compute Engine config settings for - // all instances in a cluster. - GceClusterConfig gce_cluster_config = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine config settings for - // the master instance in a cluster. - InstanceGroupConfig master_config = 9 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine config settings for - // worker instances in a cluster. - InstanceGroupConfig worker_config = 10 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine config settings for - // additional worker instances in a cluster. - InstanceGroupConfig secondary_worker_config = 12 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The config settings for software inside the cluster. - SoftwareConfig software_config = 13 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The config setting for auto delete cluster schedule. - LifecycleConfig lifecycle_config = 14 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Commands to execute on each node after config is - // completed. By default, executables are run on master and all worker nodes. - // You can test a node's role metadata to run an executable on - // a master or worker node, as shown below using `curl` (you can also use - // `wget`): - // - // ROLE=$(curl -H Metadata-Flavor:Google - // http://metadata/computeMetadata/v1beta2/instance/attributes/dataproc-role) - // if [[ "${ROLE}" == 'Master' ]]; then - // ... master specific actions ... - // else - // ... worker specific actions ... - // fi - repeated NodeInitializationAction initialization_actions = 11 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Encryption settings for the cluster. - EncryptionConfig encryption_config = 15 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Autoscaling config for the policy associated with the cluster. - // Cluster does not autoscale if this field is unset. - AutoscalingConfig autoscaling_config = 16 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Port/endpoint configuration for this cluster - EndpointConfig endpoint_config = 17 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Security related configuration. - SecurityConfig security_config = 18 [(google.api.field_behavior) = OPTIONAL]; -} - -// Endpoint config for this cluster -message EndpointConfig { - // Output only. The map of port descriptions to URLs. 
Will only be populated - // if enable_http_port_access is true. - map http_ports = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. If true, enable http access to specific ports on the cluster - // from external sources. Defaults to false. - bool enable_http_port_access = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// Autoscaling Policy config associated with the cluster. -message AutoscalingConfig { - // Optional. The autoscaling policy used by the cluster. - // - // Only resource names including projectid and location (region) are valid. - // Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` - // * `projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` - // - // Note that the policy must be in the same project and Cloud Dataproc region. - string policy_uri = 1 [(google.api.field_behavior) = OPTIONAL]; -} - -// Encryption settings for the cluster. -message EncryptionConfig { - // Optional. The Cloud KMS key name to use for PD disk encryption for all - // instances in the cluster. - string gce_pd_kms_key_name = 1 [(google.api.field_behavior) = OPTIONAL]; -} - -// Common config settings for resources of Compute Engine cluster -// instances, applicable to all instances in the cluster. -message GceClusterConfig { - // Optional. The zone where the Compute Engine cluster will be located. - // On a create request, it is required in the "global" region. If omitted - // in a non-global Cloud Dataproc region, the service will pick a zone in the - // corresponding Compute Engine region. On a get request, zone will always be - // present. - // - // A full URL, partial URI, or short name are valid. Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]` - // * `projects/[project_id]/zones/[zone]` - // * `us-central1-f` - string zone_uri = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine network to be used for machine - // communications. Cannot be specified with subnetwork_uri. If neither - // `network_uri` nor `subnetwork_uri` is specified, the "default" network of - // the project is used, if it exists. Cannot be a "Custom Subnet Network" (see - // [Using Subnetworks](/compute/docs/subnetworks) for more information). - // - // A full URL, partial URI, or short name are valid. Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/global/default` - // * `projects/[project_id]/regions/global/default` - // * `default` - string network_uri = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine subnetwork to be used for machine - // communications. Cannot be specified with network_uri. - // - // A full URL, partial URI, or short name are valid. Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0` - // * `projects/[project_id]/regions/us-east1/subnetworks/sub0` - // * `sub0` - string subnetwork_uri = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If true, all instances in the cluster will only have internal IP - // addresses. By default, clusters are not restricted to internal IP - // addresses, and will have ephemeral external IP addresses assigned to each - // instance. 
This `internal_ip_only` restriction can only be enabled for - // subnetwork enabled networks, and all off-cluster dependencies must be - // configured to be accessible without external IP addresses. - bool internal_ip_only = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The service account of the instances. Defaults to the default - // Compute Engine service account. Custom service accounts need - // permissions equivalent to the following IAM roles: - // - // * roles/logging.logWriter - // * roles/storage.objectAdmin - // - // (see - // https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts - // for more information). - // Example: `[account_id]@[project_id].iam.gserviceaccount.com` - string service_account = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The URIs of service account scopes to be included in - // Compute Engine instances. The following base set of scopes is always - // included: - // - // * https://www.googleapis.com/auth/cloud.useraccounts.readonly - // * https://www.googleapis.com/auth/devstorage.read_write - // * https://www.googleapis.com/auth/logging.write - // - // If no scopes are specified, the following defaults are also provided: - // - // * https://www.googleapis.com/auth/bigquery - // * https://www.googleapis.com/auth/bigtable.admin.table - // * https://www.googleapis.com/auth/bigtable.data - // * https://www.googleapis.com/auth/devstorage.full_control - repeated string service_account_scopes = 3 [(google.api.field_behavior) = OPTIONAL]; - - // The Compute Engine tags to add to all instances (see - // [Tagging instances](/compute/docs/label-or-tag-resources#tags)). - repeated string tags = 4; - - // The Compute Engine metadata entries to add to all instances (see - // [Project and instance - // metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)). - map metadata = 5; - - // Optional. Reservation Affinity for consuming Zonal reservation. - ReservationAffinity reservation_affinity = 11 [(google.api.field_behavior) = OPTIONAL]; -} - -// The config settings for Compute Engine resources in -// an instance group, such as a master or worker group. -message InstanceGroupConfig { - // Optional. The number of VM instances in the instance group. - // For master instance groups, must be set to 1. - int32 num_instances = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. The list of instance names. Cloud Dataproc derives the names - // from `cluster_name`, `num_instances`, and the instance group. - repeated string instance_names = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. The Compute Engine image resource used for cluster - // instances. It can be specified or may be inferred from - // `SoftwareConfig.image_version`. - string image_uri = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine machine type used for cluster instances. - // - // A full URL, partial URI, or short name are valid. Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` - // * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` - // * `n1-standard-2` - // - // **Auto Zone Exception**: If you are using the Cloud Dataproc - // [Auto Zone - // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) - // feature, you must use the short name of the machine type - // resource, for example, `n1-standard-2`. 
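For orientation, a hypothetical sketch of the cluster shape these messages describe, using the companion ClusterControllerClient that is removed elsewhere in this change; the project, region, names, sizes, and policy URI are placeholders:

    from google.cloud import dataproc_v1beta2

    client = dataproc_v1beta2.ClusterControllerClient()

    cluster = {
        "project_id": "my-project",
        "cluster_name": "sample-cluster",
        "config": {
            "gce_cluster_config": {"zone_uri": "us-central1-f"},
            "master_config": {"num_instances": 1, "machine_type_uri": "n1-standard-2"},
            "worker_config": {"num_instances": 2, "machine_type_uri": "n1-standard-2"},
            "autoscaling_config": {
                "policy_uri": "projects/my-project/locations/us-central1/autoscalingPolicies/sample-policy"
            },
            "endpoint_config": {"enable_http_port_access": True},
        },
    }

    operation = client.create_cluster("my-project", "us-central1", cluster)
    operation.result()  # block until the cluster reaches the RUNNING state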
- string machine_type_uri = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Disk option config settings. - DiskConfig disk_config = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Specifies that this instance group contains preemptible - // instances. - bool is_preemptible = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. The config for Compute Engine Instance Group - // Manager that manages this group. - // This is only used for preemptible instance groups. - ManagedGroupConfig managed_group_config = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. The Compute Engine accelerator configuration for these - // instances. - repeated AcceleratorConfig accelerators = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Specifies the minimum cpu platform for the Instance Group. - // See [Cloud Dataproc→Minimum CPU Platform] - // (/dataproc/docs/concepts/compute/dataproc-min-cpu). - string min_cpu_platform = 9; -} - -// Specifies the resources used to actively manage an instance group. -message ManagedGroupConfig { - // Output only. The name of the Instance Template used for the Managed - // Instance Group. - string instance_template_name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The name of the Instance Group Manager for this group. - string instance_group_manager_name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Specifies the type and number of accelerator cards attached to the instances -// of an instance group (see [GPUs on Compute Engine](/compute/docs/gpus/)). -message AcceleratorConfig { - // Full URL, partial URI, or short name of the accelerator type resource to - // expose to this instance. See - // [Compute Engine - // AcceleratorTypes](/compute/docs/reference/beta/acceleratorTypes) - // - // Examples - // * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` - // * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` - // * `nvidia-tesla-k80` - // - // **Auto Zone Exception**: If you are using the Cloud Dataproc - // [Auto Zone - // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) - // feature, you must use the short name of the accelerator type - // resource, for example, `nvidia-tesla-k80`. - string accelerator_type_uri = 1; - - // The number of the accelerator cards of this type exposed to this instance. - int32 accelerator_count = 2; -} - -// Specifies the config of disk options for a group of VM instances. -message DiskConfig { - // Optional. Type of the boot disk (default is "pd-standard"). - // Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or - // "pd-standard" (Persistent Disk Hard Disk Drive). - string boot_disk_type = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Size in GB of the boot disk (default is 500GB). - int32 boot_disk_size_gb = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Number of attached SSDs, from 0 to 4 (default is 0). - // If SSDs are not attached, the boot disk is used to store runtime logs and - // [HDFS](https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. - // If one or more SSDs are attached, this runtime bulk - // data is spread across them, and the boot disk contains only basic - // config and installed binaries. - int32 num_local_ssds = 2; -} - -// Specifies the cluster auto-delete schedule configuration. -message LifecycleConfig { - // Optional. 
The duration to keep the cluster alive while idling. - // Passing this threshold will cause the cluster to be - // deleted. Valid range: **[10m, 14d]**. - // - // Example: **"10m"**, the minimum value, to delete the - // cluster when it has had no jobs running for 10 minutes. - google.protobuf.Duration idle_delete_ttl = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Either the exact time the cluster should be deleted at or - // the cluster maximum age. - oneof ttl { - // Optional. The time when cluster will be auto-deleted. - google.protobuf.Timestamp auto_delete_time = 2; - - // Optional. The lifetime duration of cluster. The cluster will be - // auto-deleted at the end of this period. Valid range: **[10m, 14d]**. - // - // Example: **"1d"**, to delete the cluster 1 day after its creation.. - google.protobuf.Duration auto_delete_ttl = 3; - } - - // Output only. The time when cluster became idle (most recent job finished) - // and became eligible for deletion due to idleness. - google.protobuf.Timestamp idle_start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Security related configuration, including encryption, Kerberos, etc. -message SecurityConfig { - // Kerberos related configuration. - KerberosConfig kerberos_config = 1; -} - -// Specifies Kerberos related configuration. -message KerberosConfig { - // Optional. Flag to indicate whether to Kerberize the cluster. - bool enable_kerberos = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The Cloud Storage URI of a KMS encrypted file containing the root - // principal password. - string root_principal_password_uri = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The uri of the KMS key used to encrypt various sensitive - // files. - string kms_key_uri = 3 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The Cloud Storage URI of the keystore file used for SSL - // encryption. If not provided, Dataproc will provide a self-signed - // certificate. - string keystore_uri = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of the truststore file used for SSL - // encryption. If not provided, Dataproc will provide a self-signed - // certificate. - string truststore_uri = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the - // password to the user provided keystore. For the self-signed certificate, - // this password is generated by Dataproc. - string keystore_password_uri = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the - // password to the user provided key. For the self-signed certificate, this - // password is generated by Dataproc. - string key_password_uri = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the - // password to the user provided truststore. For the self-signed certificate, - // this password is generated by Dataproc. - string truststore_password_uri = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The remote realm the Dataproc on-cluster KDC will trust, should - // the user enable cross realm trust. - string cross_realm_trust_realm = 9 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The KDC (IP or hostname) for the remote trusted realm in a cross - // realm trust relationship. - string cross_realm_trust_kdc = 10 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. 
The admin server (IP or hostname) for the remote trusted realm in - // a cross realm trust relationship. - string cross_realm_trust_admin_server = 11 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the - // shared password between the on-cluster Kerberos realm and the remote - // trusted realm, in a cross realm trust relationship. - string cross_realm_trust_shared_password_uri = 12 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the - // master key of the KDC database. - string kdc_db_key_uri = 13 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The lifetime of the ticket granting ticket, in hours. - // If not specified, or user specifies 0, then default value 10 - // will be used. - int32 tgt_lifetime_hours = 14 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The name of the on-cluster Kerberos realm. - // If not specified, the uppercased domain of hostnames will be the realm. - string realm = 15 [(google.api.field_behavior) = OPTIONAL]; -} - -// Specifies an executable to run on a fully configured node and a -// timeout period for executable completion. -message NodeInitializationAction { - // Required. Cloud Storage URI of executable file. - string executable_file = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Amount of time executable has to complete. Default is - // 10 minutes. Cluster creation fails with an explanatory error message (the - // name of the executable that caused the error and the exceeded timeout - // period) if the executable is not completed at end of the timeout period. - google.protobuf.Duration execution_timeout = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// The status of a cluster and its instances. -message ClusterStatus { - // The cluster state. - enum State { - // The cluster state is unknown. - UNKNOWN = 0; - - // The cluster is being created and set up. It is not ready for use. - CREATING = 1; - - // The cluster is currently running and healthy. It is ready for use. - RUNNING = 2; - - // The cluster encountered an error. It is not ready for use. - ERROR = 3; - - // The cluster is being deleted. It cannot be used. - DELETING = 4; - - // The cluster is being updated. It continues to accept and process jobs. - UPDATING = 5; - } - - // The cluster substate. - enum Substate { - // The cluster substate is unknown. - UNSPECIFIED = 0; - - // The cluster is known to be in an unhealthy state - // (for example, critical daemons are not running or HDFS capacity is - // exhausted). - // - // Applies to RUNNING state. - UNHEALTHY = 1; - - // The agent-reported status is out of date (may occur if - // Cloud Dataproc loses communication with Agent). - // - // Applies to RUNNING state. - STALE_STATUS = 2; - } - - // Output only. The cluster's state. - State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Optional details of cluster's state. - string detail = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Time when this state was entered. - google.protobuf.Timestamp state_start_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Additional state information that includes - // status reported by the agent. - Substate substate = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Specifies the selection and config of software inside the cluster. -message SoftwareConfig { - // Optional. The version of software inside the cluster. 
It must be one of the - // supported [Cloud Dataproc - // Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions), - // such as "1.2" (including a subminor version, such as "1.2.29"), or the - // ["preview" - // version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). - // If unspecified, it defaults to the latest Debian version. - string image_version = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The properties to set on daemon config files. - // - // Property keys are specified in `prefix:property` format, for example - // `core:hadoop.tmp.dir`. The following are supported prefixes - // and their mappings: - // - // * capacity-scheduler: `capacity-scheduler.xml` - // * core: `core-site.xml` - // * distcp: `distcp-default.xml` - // * hdfs: `hdfs-site.xml` - // * hive: `hive-site.xml` - // * mapred: `mapred-site.xml` - // * pig: `pig.properties` - // * spark: `spark-defaults.conf` - // * yarn: `yarn-site.xml` - // - // For more information, see - // [Cluster properties](/dataproc/docs/concepts/cluster-properties). - map properties = 2 [(google.api.field_behavior) = OPTIONAL]; - - // The set of optional components to activate on the cluster. - repeated Component optional_components = 3; -} - -// Contains cluster daemon metrics, such as HDFS and YARN stats. -// -// **Beta Feature**: This report is available for testing purposes only. It may -// be changed before final release. -message ClusterMetrics { - // The HDFS metrics. - map hdfs_metrics = 1; - - // The YARN metrics. - map yarn_metrics = 2; -} - -// A request to create a cluster. -message CreateClusterRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Cloud Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster to create. - Cluster cluster = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A unique id used to identify the request. If the server - // receives two [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] requests with the same - // id, then the second request will be ignored and the - // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend - // is returned. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to update a cluster. -message UpdateClusterRequest { - // Required. The ID of the Google Cloud Platform project the - // cluster belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Cloud Dataproc region in which to handle the request. - string region = 5 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. - string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The changes to the cluster. - Cluster cluster = 3 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Timeout for graceful YARN decomissioning. Graceful - // decommissioning allows removing nodes from the cluster without - // interrupting jobs in progress. 
Timeout specifies how long to wait for jobs - // in progress to finish before forcefully removing nodes (and potentially - // interrupting jobs). Default timeout is 0 (for forceful decommission), and - // the maximum allowed timeout is 1 day. - // - // Only supported on Dataproc image versions 1.2 and higher. - google.protobuf.Duration graceful_decommission_timeout = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Specifies the path, relative to `Cluster`, of - // the field to update. For example, to change the number of workers - // in a cluster to 5, the `update_mask` parameter would be - // specified as `config.worker_config.num_instances`, - // and the `PATCH` request body would specify the new value, as follows: - // - // { - // "config":{ - // "workerConfig":{ - // "numInstances":"5" - // } - // } - // } - // - // Similarly, to change the number of preemptible workers in a cluster to 5, - // the `update_mask` parameter would be - // `config.secondary_worker_config.num_instances`, and the `PATCH` request - // body would be set as follows: - // - // { - // "config":{ - // "secondaryWorkerConfig":{ - // "numInstances":"5" - // } - // } - // } - // Note: currently only the following fields can be updated:
- //
- //   Mask                                            Purpose
- //   labels                                          Updates labels
- //   config.worker_config.num_instances              Resize primary worker group
- //   config.secondary_worker_config.num_instances    Resize secondary worker group
- //   config.lifecycle_config.auto_delete_ttl         Reset MAX TTL duration
- //   config.lifecycle_config.auto_delete_time        Update MAX TTL deletion timestamp
- //   config.lifecycle_config.idle_delete_ttl         Update Idle TTL duration
- //   config.autoscaling_config.policy_uri            Use, stop using, or change autoscaling policies
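As a concrete illustration of the update_mask mechanics documented above, here is a minimal sketch that resizes the primary worker group to 5, assuming the v1beta2 GAPIC client generated from these protos (update_cluster taking project_id, region, cluster_name, cluster, and update_mask, matching the method signature annotation in the service descriptor below) and placeholder identifiers:

    from google.cloud import dataproc_v1beta2
    from google.protobuf import field_mask_pb2

    client = dataproc_v1beta2.ClusterControllerClient()

    # Only the masked path is read from the patch cluster; other fields are ignored.
    patch = {"config": {"worker_config": {"num_instances": 5}}}
    mask = field_mask_pb2.FieldMask(paths=["config.worker_config.num_instances"])

    operation = client.update_cluster(
        "my-project", "us-central1", "example-cluster", patch, mask
    )
    operation.result()  # blocks until the resize operation completes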
- google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A unique id used to identify the request. If the server - // receives two [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] requests with the same - // id, then the second request will be ignored and the - // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the - // backend is returned. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 7 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to delete a cluster. -message DeleteClusterRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Cloud Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. - string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Specifying the `cluster_uuid` means the RPC should fail - // (with error NOT_FOUND) if cluster with specified UUID does not exist. - string cluster_uuid = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A unique id used to identify the request. If the server - // receives two [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] requests with the same - // id, then the second request will be ignored and the - // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the - // backend is returned. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Request to get the resource representation for a cluster in a project. -message GetClusterRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Cloud Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. - string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to list the clusters in a project. -message ListClustersRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Cloud Dataproc region in which to handle the request. - string region = 4 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A filter constraining the clusters to list. Filters are - // case-sensitive and have the following syntax: - // - // field = value [AND [field = value]] ... - // - // where **field** is one of `status.state`, `clusterName`, or `labels.[KEY]`, - // and `[KEY]` is a label key. **value** can be `*` to match all values. 
- // `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, - // `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` - // contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` - // contains the `DELETING` and `ERROR` states. - // `clusterName` is the name of the cluster provided at creation time. - // Only the logical `AND` operator is supported; space-separated items are - // treated as having an implicit `AND` operator. - // - // Example filter: - // - // status.state = ACTIVE AND clusterName = mycluster - // AND labels.env = staging AND labels.starred = * - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The standard List page size. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The standard List page token. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// The list of all clusters in a project. -message ListClustersResponse { - // Output only. The clusters in the project. - repeated Cluster clusters = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. This token is included in the response if there are more - // results to fetch. To fetch additional results, provide this value as the - // `page_token` in a subsequent ListClustersRequest. - string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A request to collect cluster diagnostic information. -message DiagnoseClusterRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Cloud Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. - string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// The location of diagnostic output. -message DiagnoseClusterResults { - // Output only. The Cloud Storage URI of the diagnostic output. - // The output report is a plain text file with a summary of collected - // diagnostics. - string output_uri = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Reservation Affinity for consuming Zonal reservation. -message ReservationAffinity { - // Indicates whether to consume capacity from an reservation or not. - enum Type { - TYPE_UNSPECIFIED = 0; - - // Do not consume from any allocated capacity. - NO_RESERVATION = 1; - - // Consume any reservation available. - ANY_RESERVATION = 2; - - // Must consume from a specific reservation. Must specify key value fields - // for specifying the reservations. - SPECIFIC_RESERVATION = 3; - } - - // Optional. Type of reservation to consume - Type consume_reservation_type = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Corresponds to the label key of reservation resource. - string key = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Corresponds to the label values of reservation resource. - repeated string values = 3 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py deleted file mode 100644 index d043480d7c22..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py +++ /dev/null @@ -1,4434 +0,0 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
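The filter grammar documented in ListClustersRequest above maps directly onto the list_clusters call of the generated client. A minimal sketch, again assuming the v1beta2 GAPIC surface being removed here and placeholder project, region, and label values:

    from google.cloud import dataproc_v1beta2

    client = dataproc_v1beta2.ClusterControllerClient()

    # ACTIVE is the pseudo-state covering CREATING, UPDATING, and RUNNING, per
    # the comment above; the label values are placeholders.
    cluster_filter = (
        "status.state = ACTIVE AND labels.env = staging AND labels.starred = *"
    )

    # The filter is the third positional argument (filter_) of the generated method.
    for cluster in client.list_clusters("my-project", "us-central1", cluster_filter):
        print(cluster.cluster_name, cluster.status.state)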
-# source: google/cloud/dataproc_v1beta2/proto/clusters.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.cloud.dataproc_v1beta2.proto import ( - operations_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_operations__pb2, -) -from google.cloud.dataproc_v1beta2.proto import ( - shared_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_shared__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/dataproc_v1beta2/proto/clusters.proto", - package="google.cloud.dataproc.v1beta2", - syntax="proto3", - serialized_options=_b( - "\n!com.google.cloud.dataproc.v1beta2B\rClustersProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" - ), - serialized_pb=_b( - '\n2google/cloud/dataproc_v1beta2/proto/clusters.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x34google/cloud/dataproc_v1beta2/proto/operations.proto\x1a\x30google/cloud/dataproc_v1beta2/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xe6\x03\n\x07\x43luster\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x41\n\x06\x63onfig\x18\x03 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterConfigB\x03\xe0\x41\x02\x12G\n\x06labels\x18\x08 \x03(\x0b\x32\x32.google.cloud.dataproc.v1beta2.Cluster.LabelsEntryB\x03\xe0\x41\x01\x12\x41\n\x06status\x18\x04 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterStatusB\x03\xe0\x41\x03\x12I\n\x0estatus_history\x18\x07 \x03(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterStatusB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x07metrics\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.ClusterMetricsB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xaf\x07\n\rClusterConfig\x12\x1a\n\rconfig_bucket\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12P\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.GceClusterConfigB\x03\xe0\x41\x01\x12N\n\rmaster_config\x18\t \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12N\n\rworker_config\x18\n \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12X\n\x17secondary_worker_config\x18\x0c 
\x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12K\n\x0fsoftware_config\x18\r \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.SoftwareConfigB\x03\xe0\x41\x01\x12M\n\x10lifecycle_config\x18\x0e \x01(\x0b\x32..google.cloud.dataproc.v1beta2.LifecycleConfigB\x03\xe0\x41\x01\x12\\\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.NodeInitializationActionB\x03\xe0\x41\x01\x12O\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.EncryptionConfigB\x03\xe0\x41\x01\x12Q\n\x12\x61utoscaling_config\x18\x10 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingConfigB\x03\xe0\x41\x01\x12K\n\x0f\x65ndpoint_config\x18\x11 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.EndpointConfigB\x03\xe0\x41\x01\x12K\n\x0fsecurity_config\x18\x12 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.SecurityConfigB\x03\xe0\x41\x01"\xbf\x01\n\x0e\x45ndpointConfig\x12U\n\nhttp_ports\x18\x01 \x03(\x0b\x32<.google.cloud.dataproc.v1beta2.EndpointConfig.HttpPortsEntryB\x03\xe0\x41\x03\x12$\n\x17\x65nable_http_port_access\x18\x02 \x01(\x08\x42\x03\xe0\x41\x01\x1a\x30\n\x0eHttpPortsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01",\n\x11\x41utoscalingConfig\x12\x17\n\npolicy_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01"4\n\x10\x45ncryptionConfig\x12 \n\x13gce_pd_kms_key_name\x18\x01 \x01(\tB\x03\xe0\x41\x01"\xa9\x03\n\x10GceClusterConfig\x12\x15\n\x08zone_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0bnetwork_uri\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0esubnetwork_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10internal_ip_only\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1c\n\x0fservice_account\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12#\n\x16service_account_scopes\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12O\n\x08metadata\x18\x05 \x03(\x0b\x32=.google.cloud.dataproc.v1beta2.GceClusterConfig.MetadataEntry\x12U\n\x14reservation_affinity\x18\x0b \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.ReservationAffinityB\x03\xe0\x41\x01\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa4\x03\n\x13InstanceGroupConfig\x12\x1a\n\rnum_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0einstance_names\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x16\n\timage_uri\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10machine_type_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x43\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.DiskConfigB\x03\xe0\x41\x01\x12\x1b\n\x0eis_preemptible\x18\x06 \x01(\x08\x42\x03\xe0\x41\x01\x12T\n\x14managed_group_config\x18\x07 \x01(\x0b\x32\x31.google.cloud.dataproc.v1beta2.ManagedGroupConfigB\x03\xe0\x41\x03\x12K\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AcceleratorConfigB\x03\xe0\x41\x01\x12\x18\n\x10min_cpu_platform\x18\t \x01(\t"c\n\x12ManagedGroupConfig\x12#\n\x16instance_template_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12(\n\x1binstance_group_manager_name\x18\x02 \x01(\tB\x03\xe0\x41\x03"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"a\n\nDiskConfig\x12\x1b\n\x0e\x62oot_disk_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x16\n\x0enum_local_ssds\x18\x02 \x01(\x05"\xf9\x01\n\x0fLifecycleConfig\x12\x37\n\x0fidle_delete_ttl\x18\x01 
\x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x36\n\x10\x61uto_delete_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x34\n\x0f\x61uto_delete_ttl\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12\x38\n\x0fidle_start_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x42\x05\n\x03ttl"X\n\x0eSecurityConfig\x12\x46\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.KerberosConfig"\x90\x04\n\x0eKerberosConfig\x12\x1c\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12(\n\x1broot_principal_password_uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bkms_key_uri\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0ckeystore_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0etruststore_uri\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12"\n\x15keystore_password_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10key_password_uri\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17truststore_password_uri\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x63ross_realm_trust_realm\x18\t \x01(\tB\x03\xe0\x41\x01\x12"\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\tB\x03\xe0\x41\x01\x12+\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\tB\x03\xe0\x41\x01\x12\x32\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0ekdc_db_key_uri\x18\r \x01(\tB\x03\xe0\x41\x01\x12\x1f\n\x12tgt_lifetime_hours\x18\x0e \x01(\x05\x42\x03\xe0\x41\x01\x12\x12\n\x05realm\x18\x0f \x01(\tB\x03\xe0\x41\x01"s\n\x18NodeInitializationAction\x12\x1c\n\x0f\x65xecutable_file\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\x8b\x03\n\rClusterStatus\x12\x46\n\x05state\x18\x01 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.ClusterStatus.StateB\x03\xe0\x41\x03\x12\x13\n\x06\x64\x65tail\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12L\n\x08substate\x18\x04 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.ClusterStatus.SubstateB\x03\xe0\x41\x03"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"\xfe\x01\n\x0eSoftwareConfig\x12\x1a\n\rimage_version\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12V\n\nproperties\x18\x02 \x03(\x0b\x32=.google.cloud.dataproc.v1beta2.SoftwareConfig.PropertiesEntryB\x03\xe0\x41\x01\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32(.google.cloud.dataproc.v1beta2.Component\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa4\x02\n\x0e\x43lusterMetrics\x12T\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32>.google.cloud.dataproc.v1beta2.ClusterMetrics.HdfsMetricsEntry\x12T\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32>.google.cloud.dataproc.v1beta2.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x9b\x01\n\x14\x43reateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12<\n\x07\x63luster\x18\x02 
\x01(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"\xb3\x02\n\x14UpdateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12<\n\x07\x63luster\x18\x03 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x02\x12\x45\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x07 \x01(\tB\x03\xe0\x41\x01"\x93\x01\n\x14\x44\x65leteClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\\\n\x11GetClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x89\x01\n\x13ListClustersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"s\n\x14ListClustersResponse\x12=\n\x08\x63lusters\x18\x01 \x03(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"a\n\x16\x44iagnoseClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"1\n\x16\x44iagnoseClusterResults\x12\x17\n\noutput_uri\x18\x01 \x01(\tB\x03\xe0\x41\x03"\xfd\x01\n\x13ReservationAffinity\x12^\n\x18\x63onsume_reservation_type\x18\x01 \x01(\x0e\x32\x37.google.cloud.dataproc.v1beta2.ReservationAffinity.TypeB\x03\xe0\x41\x01\x12\x10\n\x03key\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x13\n\x06values\x18\x03 \x03(\tB\x03\xe0\x41\x01"_\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eNO_RESERVATION\x10\x01\x12\x13\n\x0f\x41NY_RESERVATION\x10\x02\x12\x18\n\x14SPECIFIC_RESERVATION\x10\x03\x32\xe7\r\n\x11\x43lusterController\x12\x91\x02\n\rCreateCluster\x12\x33.google.cloud.dataproc.v1beta2.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"\xab\x01\x82\xd3\xe4\x93\x02\x43"8/v1beta2/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\xda\x41\x1bproject_id, region, cluster\xca\x41\x41\n\x07\x43luster\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xbb\x02\n\rUpdateCluster\x12\x33.google.cloud.dataproc.v1beta2.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"\xd5\x01\x82\xd3\xe4\x93\x02R2G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\xda\x41\x36project_id, region, cluster_name, cluster, update_mask\xca\x41\x41\n\x07\x43luster\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xaa\x02\n\rDeleteCluster\x12\x33.google.cloud.dataproc.v1beta2.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"\xc4\x01\x82\xd3\xe4\x93\x02I*G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41 project_id, region, 
cluster_name\xca\x41O\n\x15google.protobuf.Empty\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xda\x01\n\nGetCluster\x12\x30.google.cloud.dataproc.v1beta2.GetClusterRequest\x1a&.google.cloud.dataproc.v1beta2.Cluster"r\x82\xd3\xe4\x93\x02I\x12G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41 project_id, region, cluster_name\x12\xeb\x01\n\x0cListClusters\x12\x32.google.cloud.dataproc.v1beta2.ListClustersRequest\x1a\x33.google.cloud.dataproc.v1beta2.ListClustersResponse"r\x82\xd3\xe4\x93\x02:\x12\x38/v1beta2/projects/{project_id}/regions/{region}/clusters\xda\x41\x12project_id, region\xda\x41\x1aproject_id, region, filter\x12\xba\x02\n\x0f\x44iagnoseCluster\x12\x35.google.cloud.dataproc.v1beta2.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"\xd0\x01\x82\xd3\xe4\x93\x02U"P/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*\xda\x41 project_id, region, cluster_name\xca\x41O\n\x15google.protobuf.Empty\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB{\n!com.google.cloud.dataproc.v1beta2B\rClustersProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_operations__pb2.DESCRIPTOR, - google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_shared__pb2.DESCRIPTOR, - google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - -_CLUSTERSTATUS_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.cloud.dataproc.v1beta2.ClusterStatus.State", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="UNKNOWN", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CREATING", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ERROR", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DELETING", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="UPDATING", index=5, number=5, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4509, - serialized_end=4595, -) -_sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_STATE) - -_CLUSTERSTATUS_SUBSTATE = _descriptor.EnumDescriptor( - name="Substate", - full_name="google.cloud.dataproc.v1beta2.ClusterStatus.Substate", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="UNSPECIFIED", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="UNHEALTHY", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="STALE_STATUS", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4597, - serialized_end=4657, -) 
-_sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_SUBSTATE) - -_RESERVATIONAFFINITY_TYPE = _descriptor.EnumDescriptor( - name="Type", - full_name="google.cloud.dataproc.v1beta2.ReservationAffinity.Type", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="NO_RESERVATION", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ANY_RESERVATION", - index=2, - number=2, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="SPECIFIC_RESERVATION", - index=3, - number=3, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=6489, - serialized_end=6584, -) -_sym_db.RegisterEnumDescriptor(_RESERVATIONAFFINITY_TYPE) - - -_CLUSTER_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.cloud.dataproc.v1beta2.Cluster.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.Cluster.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.Cluster.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=855, - serialized_end=900, -) - -_CLUSTER = _descriptor.Descriptor( - name="Cluster", - full_name="google.cloud.dataproc.v1beta2.Cluster", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1beta2.Cluster.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1beta2.Cluster.cluster_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="config", - full_name="google.cloud.dataproc.v1beta2.Cluster.config", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - 
_descriptor.FieldDescriptor( - name="labels", - full_name="google.cloud.dataproc.v1beta2.Cluster.labels", - index=3, - number=8, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status", - full_name="google.cloud.dataproc.v1beta2.Cluster.status", - index=4, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status_history", - full_name="google.cloud.dataproc.v1beta2.Cluster.status_history", - index=5, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_uuid", - full_name="google.cloud.dataproc.v1beta2.Cluster.cluster_uuid", - index=6, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metrics", - full_name="google.cloud.dataproc.v1beta2.Cluster.metrics", - index=7, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_CLUSTER_LABELSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=414, - serialized_end=900, -) - - -_CLUSTERCONFIG = _descriptor.Descriptor( - name="ClusterConfig", - full_name="google.cloud.dataproc.v1beta2.ClusterConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="config_bucket", - full_name="google.cloud.dataproc.v1beta2.ClusterConfig.config_bucket", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="gce_cluster_config", - full_name="google.cloud.dataproc.v1beta2.ClusterConfig.gce_cluster_config", - index=1, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="master_config", - full_name="google.cloud.dataproc.v1beta2.ClusterConfig.master_config", - index=2, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, 
- is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="worker_config", - full_name="google.cloud.dataproc.v1beta2.ClusterConfig.worker_config", - index=3, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="secondary_worker_config", - full_name="google.cloud.dataproc.v1beta2.ClusterConfig.secondary_worker_config", - index=4, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="software_config", - full_name="google.cloud.dataproc.v1beta2.ClusterConfig.software_config", - index=5, - number=13, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="lifecycle_config", - full_name="google.cloud.dataproc.v1beta2.ClusterConfig.lifecycle_config", - index=6, - number=14, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="initialization_actions", - full_name="google.cloud.dataproc.v1beta2.ClusterConfig.initialization_actions", - index=7, - number=11, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="encryption_config", - full_name="google.cloud.dataproc.v1beta2.ClusterConfig.encryption_config", - index=8, - number=15, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="autoscaling_config", - full_name="google.cloud.dataproc.v1beta2.ClusterConfig.autoscaling_config", - index=9, - number=16, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="endpoint_config", - full_name="google.cloud.dataproc.v1beta2.ClusterConfig.endpoint_config", - index=10, - number=17, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="security_config", - 
full_name="google.cloud.dataproc.v1beta2.ClusterConfig.security_config", - index=11, - number=18, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=903, - serialized_end=1846, -) - - -_ENDPOINTCONFIG_HTTPPORTSENTRY = _descriptor.Descriptor( - name="HttpPortsEntry", - full_name="google.cloud.dataproc.v1beta2.EndpointConfig.HttpPortsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.EndpointConfig.HttpPortsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.EndpointConfig.HttpPortsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1992, - serialized_end=2040, -) - -_ENDPOINTCONFIG = _descriptor.Descriptor( - name="EndpointConfig", - full_name="google.cloud.dataproc.v1beta2.EndpointConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="http_ports", - full_name="google.cloud.dataproc.v1beta2.EndpointConfig.http_ports", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="enable_http_port_access", - full_name="google.cloud.dataproc.v1beta2.EndpointConfig.enable_http_port_access", - index=1, - number=2, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_ENDPOINTCONFIG_HTTPPORTSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1849, - serialized_end=2040, -) - - -_AUTOSCALINGCONFIG = _descriptor.Descriptor( - name="AutoscalingConfig", - full_name="google.cloud.dataproc.v1beta2.AutoscalingConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="policy_uri", - full_name="google.cloud.dataproc.v1beta2.AutoscalingConfig.policy_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2042, - serialized_end=2086, -) - - -_ENCRYPTIONCONFIG = _descriptor.Descriptor( - name="EncryptionConfig", - full_name="google.cloud.dataproc.v1beta2.EncryptionConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="gce_pd_kms_key_name", - full_name="google.cloud.dataproc.v1beta2.EncryptionConfig.gce_pd_kms_key_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2088, - serialized_end=2140, -) - - -_GCECLUSTERCONFIG_METADATAENTRY = _descriptor.Descriptor( - name="MetadataEntry", - full_name="google.cloud.dataproc.v1beta2.GceClusterConfig.MetadataEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.GceClusterConfig.MetadataEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.GceClusterConfig.MetadataEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2521, - serialized_end=2568, -) - -_GCECLUSTERCONFIG = _descriptor.Descriptor( - name="GceClusterConfig", - full_name="google.cloud.dataproc.v1beta2.GceClusterConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="zone_uri", - full_name="google.cloud.dataproc.v1beta2.GceClusterConfig.zone_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="network_uri", - full_name="google.cloud.dataproc.v1beta2.GceClusterConfig.network_uri", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="subnetwork_uri", - full_name="google.cloud.dataproc.v1beta2.GceClusterConfig.subnetwork_uri", - index=2, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="internal_ip_only", - full_name="google.cloud.dataproc.v1beta2.GceClusterConfig.internal_ip_only", - index=3, - number=7, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_account", - full_name="google.cloud.dataproc.v1beta2.GceClusterConfig.service_account", - index=4, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_account_scopes", - full_name="google.cloud.dataproc.v1beta2.GceClusterConfig.service_account_scopes", - index=5, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="tags", - full_name="google.cloud.dataproc.v1beta2.GceClusterConfig.tags", - index=6, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metadata", - full_name="google.cloud.dataproc.v1beta2.GceClusterConfig.metadata", - index=7, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="reservation_affinity", - full_name="google.cloud.dataproc.v1beta2.GceClusterConfig.reservation_affinity", - index=8, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_GCECLUSTERCONFIG_METADATAENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2143, - serialized_end=2568, -) - - -_INSTANCEGROUPCONFIG = _descriptor.Descriptor( - name="InstanceGroupConfig", - full_name="google.cloud.dataproc.v1beta2.InstanceGroupConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="num_instances", - full_name="google.cloud.dataproc.v1beta2.InstanceGroupConfig.num_instances", - 
index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="instance_names", - full_name="google.cloud.dataproc.v1beta2.InstanceGroupConfig.instance_names", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="image_uri", - full_name="google.cloud.dataproc.v1beta2.InstanceGroupConfig.image_uri", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="machine_type_uri", - full_name="google.cloud.dataproc.v1beta2.InstanceGroupConfig.machine_type_uri", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="disk_config", - full_name="google.cloud.dataproc.v1beta2.InstanceGroupConfig.disk_config", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="is_preemptible", - full_name="google.cloud.dataproc.v1beta2.InstanceGroupConfig.is_preemptible", - index=5, - number=6, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="managed_group_config", - full_name="google.cloud.dataproc.v1beta2.InstanceGroupConfig.managed_group_config", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="accelerators", - full_name="google.cloud.dataproc.v1beta2.InstanceGroupConfig.accelerators", - index=7, - number=8, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="min_cpu_platform", - full_name="google.cloud.dataproc.v1beta2.InstanceGroupConfig.min_cpu_platform", - index=8, - number=9, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2571, - serialized_end=2991, -) - - -_MANAGEDGROUPCONFIG = _descriptor.Descriptor( - name="ManagedGroupConfig", - full_name="google.cloud.dataproc.v1beta2.ManagedGroupConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="instance_template_name", - full_name="google.cloud.dataproc.v1beta2.ManagedGroupConfig.instance_template_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="instance_group_manager_name", - full_name="google.cloud.dataproc.v1beta2.ManagedGroupConfig.instance_group_manager_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2993, - serialized_end=3092, -) - - -_ACCELERATORCONFIG = _descriptor.Descriptor( - name="AcceleratorConfig", - full_name="google.cloud.dataproc.v1beta2.AcceleratorConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="accelerator_type_uri", - full_name="google.cloud.dataproc.v1beta2.AcceleratorConfig.accelerator_type_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="accelerator_count", - full_name="google.cloud.dataproc.v1beta2.AcceleratorConfig.accelerator_count", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3094, - serialized_end=3170, -) - - -_DISKCONFIG = _descriptor.Descriptor( - name="DiskConfig", - full_name="google.cloud.dataproc.v1beta2.DiskConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="boot_disk_type", - full_name="google.cloud.dataproc.v1beta2.DiskConfig.boot_disk_type", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - 
name="boot_disk_size_gb", - full_name="google.cloud.dataproc.v1beta2.DiskConfig.boot_disk_size_gb", - index=1, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="num_local_ssds", - full_name="google.cloud.dataproc.v1beta2.DiskConfig.num_local_ssds", - index=2, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3172, - serialized_end=3269, -) - - -_LIFECYCLECONFIG = _descriptor.Descriptor( - name="LifecycleConfig", - full_name="google.cloud.dataproc.v1beta2.LifecycleConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="idle_delete_ttl", - full_name="google.cloud.dataproc.v1beta2.LifecycleConfig.idle_delete_ttl", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="auto_delete_time", - full_name="google.cloud.dataproc.v1beta2.LifecycleConfig.auto_delete_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="auto_delete_ttl", - full_name="google.cloud.dataproc.v1beta2.LifecycleConfig.auto_delete_ttl", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="idle_start_time", - full_name="google.cloud.dataproc.v1beta2.LifecycleConfig.idle_start_time", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="ttl", - full_name="google.cloud.dataproc.v1beta2.LifecycleConfig.ttl", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=3272, - serialized_end=3521, -) - - -_SECURITYCONFIG = _descriptor.Descriptor( - name="SecurityConfig", - full_name="google.cloud.dataproc.v1beta2.SecurityConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="kerberos_config", - full_name="google.cloud.dataproc.v1beta2.SecurityConfig.kerberos_config", - index=0, - 
number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3523, - serialized_end=3611, -) - - -_KERBEROSCONFIG = _descriptor.Descriptor( - name="KerberosConfig", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="enable_kerberos", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.enable_kerberos", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="root_principal_password_uri", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.root_principal_password_uri", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="kms_key_uri", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.kms_key_uri", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="keystore_uri", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.keystore_uri", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="truststore_uri", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.truststore_uri", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="keystore_password_uri", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.keystore_password_uri", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="key_password_uri", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.key_password_uri", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="truststore_password_uri", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.truststore_password_uri", - index=7, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cross_realm_trust_realm", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.cross_realm_trust_realm", - index=8, - number=9, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cross_realm_trust_kdc", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.cross_realm_trust_kdc", - index=9, - number=10, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cross_realm_trust_admin_server", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.cross_realm_trust_admin_server", - index=10, - number=11, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cross_realm_trust_shared_password_uri", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.cross_realm_trust_shared_password_uri", - index=11, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="kdc_db_key_uri", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.kdc_db_key_uri", - index=12, - number=13, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="tgt_lifetime_hours", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.tgt_lifetime_hours", - index=13, - number=14, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="realm", - full_name="google.cloud.dataproc.v1beta2.KerberosConfig.realm", - index=14, - number=15, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3614, - serialized_end=4142, -) - - -_NODEINITIALIZATIONACTION = _descriptor.Descriptor( - name="NodeInitializationAction", - full_name="google.cloud.dataproc.v1beta2.NodeInitializationAction", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="executable_file", - full_name="google.cloud.dataproc.v1beta2.NodeInitializationAction.executable_file", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="execution_timeout", - full_name="google.cloud.dataproc.v1beta2.NodeInitializationAction.execution_timeout", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4144, - serialized_end=4259, -) - - -_CLUSTERSTATUS = _descriptor.Descriptor( - name="ClusterStatus", - full_name="google.cloud.dataproc.v1beta2.ClusterStatus", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.dataproc.v1beta2.ClusterStatus.state", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="detail", - full_name="google.cloud.dataproc.v1beta2.ClusterStatus.detail", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="state_start_time", - full_name="google.cloud.dataproc.v1beta2.ClusterStatus.state_start_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="substate", - full_name="google.cloud.dataproc.v1beta2.ClusterStatus.substate", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_CLUSTERSTATUS_STATE, _CLUSTERSTATUS_SUBSTATE], - 
serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4262, - serialized_end=4657, -) - - -_SOFTWARECONFIG_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - full_name="google.cloud.dataproc.v1beta2.SoftwareConfig.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.SoftwareConfig.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.SoftwareConfig.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4865, - serialized_end=4914, -) - -_SOFTWARECONFIG = _descriptor.Descriptor( - name="SoftwareConfig", - full_name="google.cloud.dataproc.v1beta2.SoftwareConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="image_version", - full_name="google.cloud.dataproc.v1beta2.SoftwareConfig.image_version", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1beta2.SoftwareConfig.properties", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="optional_components", - full_name="google.cloud.dataproc.v1beta2.SoftwareConfig.optional_components", - index=2, - number=3, - type=14, - cpp_type=8, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_SOFTWARECONFIG_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4660, - serialized_end=4914, -) - - -_CLUSTERMETRICS_HDFSMETRICSENTRY = _descriptor.Descriptor( - name="HdfsMetricsEntry", - full_name="google.cloud.dataproc.v1beta2.ClusterMetrics.HdfsMetricsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.ClusterMetrics.HdfsMetricsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.ClusterMetrics.HdfsMetricsEntry.value", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5107, - serialized_end=5157, -) - -_CLUSTERMETRICS_YARNMETRICSENTRY = _descriptor.Descriptor( - name="YarnMetricsEntry", - full_name="google.cloud.dataproc.v1beta2.ClusterMetrics.YarnMetricsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.ClusterMetrics.YarnMetricsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.ClusterMetrics.YarnMetricsEntry.value", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5159, - serialized_end=5209, -) - -_CLUSTERMETRICS = _descriptor.Descriptor( - name="ClusterMetrics", - full_name="google.cloud.dataproc.v1beta2.ClusterMetrics", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="hdfs_metrics", - full_name="google.cloud.dataproc.v1beta2.ClusterMetrics.hdfs_metrics", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="yarn_metrics", - full_name="google.cloud.dataproc.v1beta2.ClusterMetrics.yarn_metrics", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_CLUSTERMETRICS_HDFSMETRICSENTRY, _CLUSTERMETRICS_YARNMETRICSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4917, - serialized_end=5209, -) - - -_CREATECLUSTERREQUEST = _descriptor.Descriptor( - name="CreateClusterRequest", - full_name="google.cloud.dataproc.v1beta2.CreateClusterRequest", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1beta2.CreateClusterRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1beta2.CreateClusterRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster", - full_name="google.cloud.dataproc.v1beta2.CreateClusterRequest.cluster", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="request_id", - full_name="google.cloud.dataproc.v1beta2.CreateClusterRequest.request_id", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5212, - serialized_end=5367, -) - - -_UPDATECLUSTERREQUEST = _descriptor.Descriptor( - name="UpdateClusterRequest", - full_name="google.cloud.dataproc.v1beta2.UpdateClusterRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1beta2.UpdateClusterRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1beta2.UpdateClusterRequest.region", - index=1, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1beta2.UpdateClusterRequest.cluster_name", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster", - full_name="google.cloud.dataproc.v1beta2.UpdateClusterRequest.cluster", - 
index=3, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="graceful_decommission_timeout", - full_name="google.cloud.dataproc.v1beta2.UpdateClusterRequest.graceful_decommission_timeout", - index=4, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.dataproc.v1beta2.UpdateClusterRequest.update_mask", - index=5, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="request_id", - full_name="google.cloud.dataproc.v1beta2.UpdateClusterRequest.request_id", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5370, - serialized_end=5677, -) - - -_DELETECLUSTERREQUEST = _descriptor.Descriptor( - name="DeleteClusterRequest", - full_name="google.cloud.dataproc.v1beta2.DeleteClusterRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1beta2.DeleteClusterRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1beta2.DeleteClusterRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1beta2.DeleteClusterRequest.cluster_name", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_uuid", - full_name="google.cloud.dataproc.v1beta2.DeleteClusterRequest.cluster_uuid", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="request_id", - full_name="google.cloud.dataproc.v1beta2.DeleteClusterRequest.request_id", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5680, - serialized_end=5827, -) - - -_GETCLUSTERREQUEST = _descriptor.Descriptor( - name="GetClusterRequest", - full_name="google.cloud.dataproc.v1beta2.GetClusterRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1beta2.GetClusterRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1beta2.GetClusterRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1beta2.GetClusterRequest.cluster_name", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5829, - serialized_end=5921, -) - - -_LISTCLUSTERSREQUEST = _descriptor.Descriptor( - name="ListClustersRequest", - full_name="google.cloud.dataproc.v1beta2.ListClustersRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1beta2.ListClustersRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1beta2.ListClustersRequest.region", - index=1, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - 
_descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.dataproc.v1beta2.ListClustersRequest.filter", - index=2, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.dataproc.v1beta2.ListClustersRequest.page_size", - index=3, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.dataproc.v1beta2.ListClustersRequest.page_token", - index=4, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5924, - serialized_end=6061, -) - - -_LISTCLUSTERSRESPONSE = _descriptor.Descriptor( - name="ListClustersResponse", - full_name="google.cloud.dataproc.v1beta2.ListClustersResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="clusters", - full_name="google.cloud.dataproc.v1beta2.ListClustersResponse.clusters", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.dataproc.v1beta2.ListClustersResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6063, - serialized_end=6178, -) - - -_DIAGNOSECLUSTERREQUEST = _descriptor.Descriptor( - name="DiagnoseClusterRequest", - full_name="google.cloud.dataproc.v1beta2.DiagnoseClusterRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1beta2.DiagnoseClusterRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1beta2.DiagnoseClusterRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - 
label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1beta2.DiagnoseClusterRequest.cluster_name", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6180, - serialized_end=6277, -) - - -_DIAGNOSECLUSTERRESULTS = _descriptor.Descriptor( - name="DiagnoseClusterResults", - full_name="google.cloud.dataproc.v1beta2.DiagnoseClusterResults", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="output_uri", - full_name="google.cloud.dataproc.v1beta2.DiagnoseClusterResults.output_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6279, - serialized_end=6328, -) - - -_RESERVATIONAFFINITY = _descriptor.Descriptor( - name="ReservationAffinity", - full_name="google.cloud.dataproc.v1beta2.ReservationAffinity", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="consume_reservation_type", - full_name="google.cloud.dataproc.v1beta2.ReservationAffinity.consume_reservation_type", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.ReservationAffinity.key", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="values", - full_name="google.cloud.dataproc.v1beta2.ReservationAffinity.values", - index=2, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_RESERVATIONAFFINITY_TYPE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6331, - serialized_end=6584, -) - -_CLUSTER_LABELSENTRY.containing_type = _CLUSTER -_CLUSTER.fields_by_name["config"].message_type = 
_CLUSTERCONFIG -_CLUSTER.fields_by_name["labels"].message_type = _CLUSTER_LABELSENTRY -_CLUSTER.fields_by_name["status"].message_type = _CLUSTERSTATUS -_CLUSTER.fields_by_name["status_history"].message_type = _CLUSTERSTATUS -_CLUSTER.fields_by_name["metrics"].message_type = _CLUSTERMETRICS -_CLUSTERCONFIG.fields_by_name["gce_cluster_config"].message_type = _GCECLUSTERCONFIG -_CLUSTERCONFIG.fields_by_name["master_config"].message_type = _INSTANCEGROUPCONFIG -_CLUSTERCONFIG.fields_by_name["worker_config"].message_type = _INSTANCEGROUPCONFIG -_CLUSTERCONFIG.fields_by_name[ - "secondary_worker_config" -].message_type = _INSTANCEGROUPCONFIG -_CLUSTERCONFIG.fields_by_name["software_config"].message_type = _SOFTWARECONFIG -_CLUSTERCONFIG.fields_by_name["lifecycle_config"].message_type = _LIFECYCLECONFIG -_CLUSTERCONFIG.fields_by_name[ - "initialization_actions" -].message_type = _NODEINITIALIZATIONACTION -_CLUSTERCONFIG.fields_by_name["encryption_config"].message_type = _ENCRYPTIONCONFIG -_CLUSTERCONFIG.fields_by_name["autoscaling_config"].message_type = _AUTOSCALINGCONFIG -_CLUSTERCONFIG.fields_by_name["endpoint_config"].message_type = _ENDPOINTCONFIG -_CLUSTERCONFIG.fields_by_name["security_config"].message_type = _SECURITYCONFIG -_ENDPOINTCONFIG_HTTPPORTSENTRY.containing_type = _ENDPOINTCONFIG -_ENDPOINTCONFIG.fields_by_name[ - "http_ports" -].message_type = _ENDPOINTCONFIG_HTTPPORTSENTRY -_GCECLUSTERCONFIG_METADATAENTRY.containing_type = _GCECLUSTERCONFIG -_GCECLUSTERCONFIG.fields_by_name[ - "metadata" -].message_type = _GCECLUSTERCONFIG_METADATAENTRY -_GCECLUSTERCONFIG.fields_by_name[ - "reservation_affinity" -].message_type = _RESERVATIONAFFINITY -_INSTANCEGROUPCONFIG.fields_by_name["disk_config"].message_type = _DISKCONFIG -_INSTANCEGROUPCONFIG.fields_by_name[ - "managed_group_config" -].message_type = _MANAGEDGROUPCONFIG -_INSTANCEGROUPCONFIG.fields_by_name["accelerators"].message_type = _ACCELERATORCONFIG -_LIFECYCLECONFIG.fields_by_name[ - "idle_delete_ttl" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_LIFECYCLECONFIG.fields_by_name[ - "auto_delete_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LIFECYCLECONFIG.fields_by_name[ - "auto_delete_ttl" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_LIFECYCLECONFIG.fields_by_name[ - "idle_start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LIFECYCLECONFIG.oneofs_by_name["ttl"].fields.append( - _LIFECYCLECONFIG.fields_by_name["auto_delete_time"] -) -_LIFECYCLECONFIG.fields_by_name[ - "auto_delete_time" -].containing_oneof = _LIFECYCLECONFIG.oneofs_by_name["ttl"] -_LIFECYCLECONFIG.oneofs_by_name["ttl"].fields.append( - _LIFECYCLECONFIG.fields_by_name["auto_delete_ttl"] -) -_LIFECYCLECONFIG.fields_by_name[ - "auto_delete_ttl" -].containing_oneof = _LIFECYCLECONFIG.oneofs_by_name["ttl"] -_SECURITYCONFIG.fields_by_name["kerberos_config"].message_type = _KERBEROSCONFIG -_NODEINITIALIZATIONACTION.fields_by_name[ - "execution_timeout" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_CLUSTERSTATUS.fields_by_name["state"].enum_type = _CLUSTERSTATUS_STATE -_CLUSTERSTATUS.fields_by_name[ - "state_start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_CLUSTERSTATUS.fields_by_name["substate"].enum_type = _CLUSTERSTATUS_SUBSTATE -_CLUSTERSTATUS_STATE.containing_type = _CLUSTERSTATUS -_CLUSTERSTATUS_SUBSTATE.containing_type = _CLUSTERSTATUS -_SOFTWARECONFIG_PROPERTIESENTRY.containing_type = _SOFTWARECONFIG 
-_SOFTWARECONFIG.fields_by_name[ - "properties" -].message_type = _SOFTWARECONFIG_PROPERTIESENTRY -_SOFTWARECONFIG.fields_by_name[ - "optional_components" -].enum_type = ( - google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_shared__pb2._COMPONENT -) -_CLUSTERMETRICS_HDFSMETRICSENTRY.containing_type = _CLUSTERMETRICS -_CLUSTERMETRICS_YARNMETRICSENTRY.containing_type = _CLUSTERMETRICS -_CLUSTERMETRICS.fields_by_name[ - "hdfs_metrics" -].message_type = _CLUSTERMETRICS_HDFSMETRICSENTRY -_CLUSTERMETRICS.fields_by_name[ - "yarn_metrics" -].message_type = _CLUSTERMETRICS_YARNMETRICSENTRY -_CREATECLUSTERREQUEST.fields_by_name["cluster"].message_type = _CLUSTER -_UPDATECLUSTERREQUEST.fields_by_name["cluster"].message_type = _CLUSTER -_UPDATECLUSTERREQUEST.fields_by_name[ - "graceful_decommission_timeout" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_UPDATECLUSTERREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTCLUSTERSRESPONSE.fields_by_name["clusters"].message_type = _CLUSTER -_RESERVATIONAFFINITY.fields_by_name[ - "consume_reservation_type" -].enum_type = _RESERVATIONAFFINITY_TYPE -_RESERVATIONAFFINITY_TYPE.containing_type = _RESERVATIONAFFINITY -DESCRIPTOR.message_types_by_name["Cluster"] = _CLUSTER -DESCRIPTOR.message_types_by_name["ClusterConfig"] = _CLUSTERCONFIG -DESCRIPTOR.message_types_by_name["EndpointConfig"] = _ENDPOINTCONFIG -DESCRIPTOR.message_types_by_name["AutoscalingConfig"] = _AUTOSCALINGCONFIG -DESCRIPTOR.message_types_by_name["EncryptionConfig"] = _ENCRYPTIONCONFIG -DESCRIPTOR.message_types_by_name["GceClusterConfig"] = _GCECLUSTERCONFIG -DESCRIPTOR.message_types_by_name["InstanceGroupConfig"] = _INSTANCEGROUPCONFIG -DESCRIPTOR.message_types_by_name["ManagedGroupConfig"] = _MANAGEDGROUPCONFIG -DESCRIPTOR.message_types_by_name["AcceleratorConfig"] = _ACCELERATORCONFIG -DESCRIPTOR.message_types_by_name["DiskConfig"] = _DISKCONFIG -DESCRIPTOR.message_types_by_name["LifecycleConfig"] = _LIFECYCLECONFIG -DESCRIPTOR.message_types_by_name["SecurityConfig"] = _SECURITYCONFIG -DESCRIPTOR.message_types_by_name["KerberosConfig"] = _KERBEROSCONFIG -DESCRIPTOR.message_types_by_name["NodeInitializationAction"] = _NODEINITIALIZATIONACTION -DESCRIPTOR.message_types_by_name["ClusterStatus"] = _CLUSTERSTATUS -DESCRIPTOR.message_types_by_name["SoftwareConfig"] = _SOFTWARECONFIG -DESCRIPTOR.message_types_by_name["ClusterMetrics"] = _CLUSTERMETRICS -DESCRIPTOR.message_types_by_name["CreateClusterRequest"] = _CREATECLUSTERREQUEST -DESCRIPTOR.message_types_by_name["UpdateClusterRequest"] = _UPDATECLUSTERREQUEST -DESCRIPTOR.message_types_by_name["DeleteClusterRequest"] = _DELETECLUSTERREQUEST -DESCRIPTOR.message_types_by_name["GetClusterRequest"] = _GETCLUSTERREQUEST -DESCRIPTOR.message_types_by_name["ListClustersRequest"] = _LISTCLUSTERSREQUEST -DESCRIPTOR.message_types_by_name["ListClustersResponse"] = _LISTCLUSTERSRESPONSE -DESCRIPTOR.message_types_by_name["DiagnoseClusterRequest"] = _DIAGNOSECLUSTERREQUEST -DESCRIPTOR.message_types_by_name["DiagnoseClusterResults"] = _DIAGNOSECLUSTERRESULTS -DESCRIPTOR.message_types_by_name["ReservationAffinity"] = _RESERVATIONAFFINITY -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Cluster = _reflection.GeneratedProtocolMessageType( - "Cluster", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTER_LABELSENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2" 
- # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.Cluster.LabelsEntry) - ), - ), - DESCRIPTOR=_CLUSTER, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Describes the identifying information, config, and status - of a cluster of Compute Engine instances. - - - Attributes: - project_id: - Required. The Google Cloud Platform project ID that the - cluster belongs to. - cluster_name: - Required. The cluster name. Cluster names within a project - must be unique. Names of deleted clusters can be reused. - config: - Required. The cluster config. Note that Cloud Dataproc may set - default values, and values may change when clusters are - updated. - labels: - Optional. The labels to associate with this cluster. Label - **keys** must contain 1 to 63 characters, and must conform to - `RFC 1035 `__. Label - **values** may be empty, but, if present, must contain 1 to 63 - characters, and must conform to `RFC 1035 - `__. No more than 32 - labels can be associated with a cluster. - status: - Output only. Cluster status. - status_history: - Output only. The previous cluster status. - cluster_uuid: - Output only. A cluster UUID (Unique Universal Identifier). - Cloud Dataproc generates this value when it creates the - cluster. - metrics: - Output only. Contains cluster daemon metrics such as HDFS and - YARN stats. **Beta Feature**: This report is available for - testing purposes only. It may be changed before final release. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.Cluster) - ), -) -_sym_db.RegisterMessage(Cluster) -_sym_db.RegisterMessage(Cluster.LabelsEntry) - -ClusterConfig = _reflection.GeneratedProtocolMessageType( - "ClusterConfig", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTERCONFIG, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""The cluster config. - - - Attributes: - config_bucket: - Optional. A Google Cloud Storage bucket used to stage job - dependencies, config files, and job driver console output. If - you do not specify a staging bucket, Cloud Dataproc will - determine a Cloud Storage location (US, ASIA, or EU) for your - cluster's staging bucket according to the Google Compute - Engine zone where your cluster is deployed, and then create - and manage this project-level, per-location bucket (see `Cloud - Dataproc staging bucket `__). - gce_cluster_config: - Optional. The shared Compute Engine config settings for all - instances in a cluster. - master_config: - Optional. The Compute Engine config settings for the master - instance in a cluster. - worker_config: - Optional. The Compute Engine config settings for worker - instances in a cluster. - secondary_worker_config: - Optional. The Compute Engine config settings for additional - worker instances in a cluster. - software_config: - Optional. The config settings for software inside the cluster. - lifecycle_config: - Optional. The config setting for auto delete cluster schedule. - initialization_actions: - Optional. Commands to execute on each node after config is - completed. By default, executables are run on master and all - worker nodes. You can test a node's role metadata to run an - executable on a master or worker node, as shown below using - ``curl`` (you can also use ``wget``): :: ROLE=$(curl -H - Metadata-Flavor:Google http://metadata/computeMetadata/v1b - eta2/instance/attributes/dataproc-role) if [[ "${ROLE}" == - 'Master' ]]; then ... master specific actions ... - else ... worker specific actions ... 
fi - encryption_config: - Optional. Encryption settings for the cluster. - autoscaling_config: - Optional. Autoscaling config for the policy associated with - the cluster. Cluster does not autoscale if this field is - unset. - endpoint_config: - Optional. Port/endpoint configuration for this cluster - security_config: - Optional. Security related configuration. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ClusterConfig) - ), -) -_sym_db.RegisterMessage(ClusterConfig) - -EndpointConfig = _reflection.GeneratedProtocolMessageType( - "EndpointConfig", - (_message.Message,), - dict( - HttpPortsEntry=_reflection.GeneratedProtocolMessageType( - "HttpPortsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_ENDPOINTCONFIG_HTTPPORTSENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.EndpointConfig.HttpPortsEntry) - ), - ), - DESCRIPTOR=_ENDPOINTCONFIG, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Endpoint config for this cluster - - - Attributes: - http_ports: - Output only. The map of port descriptions to URLs. Will only - be populated if enable\_http\_port\_access is true. - enable_http_port_access: - Optional. If true, enable http access to specific ports on the - cluster from external sources. Defaults to false. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.EndpointConfig) - ), -) -_sym_db.RegisterMessage(EndpointConfig) -_sym_db.RegisterMessage(EndpointConfig.HttpPortsEntry) - -AutoscalingConfig = _reflection.GeneratedProtocolMessageType( - "AutoscalingConfig", - (_message.Message,), - dict( - DESCRIPTOR=_AUTOSCALINGCONFIG, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Autoscaling Policy config associated with the cluster. - - - Attributes: - policy_uri: - Optional. The autoscaling policy used by the cluster. Only - resource names including projectid and location (region) are - valid. Examples: - ``https://www.googleapis.com/compute/v1/p - rojects/[project_id]/locations/[dataproc_region]/autoscalingPo - licies/[policy_id]`` - ``projects/[project_id]/locations/[dat - aproc_region]/autoscalingPolicies/[policy_id]`` Note that the - policy must be in the same project and Cloud Dataproc region. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.AutoscalingConfig) - ), -) -_sym_db.RegisterMessage(AutoscalingConfig) - -EncryptionConfig = _reflection.GeneratedProtocolMessageType( - "EncryptionConfig", - (_message.Message,), - dict( - DESCRIPTOR=_ENCRYPTIONCONFIG, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Encryption settings for the cluster. - - - Attributes: - gce_pd_kms_key_name: - Optional. The Cloud KMS key name to use for PD disk encryption - for all instances in the cluster. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.EncryptionConfig) - ), -) -_sym_db.RegisterMessage(EncryptionConfig) - -GceClusterConfig = _reflection.GeneratedProtocolMessageType( - "GceClusterConfig", - (_message.Message,), - dict( - MetadataEntry=_reflection.GeneratedProtocolMessageType( - "MetadataEntry", - (_message.Message,), - dict( - DESCRIPTOR=_GCECLUSTERCONFIG_METADATAENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.GceClusterConfig.MetadataEntry) - ), - ), - DESCRIPTOR=_GCECLUSTERCONFIG, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Common config settings for resources of Compute Engine - cluster instances, applicable to all instances in the cluster. - - - Attributes: - zone_uri: - Optional. The zone where the Compute Engine cluster will be - located. On a create request, it is required in the "global" - region. If omitted in a non-global Cloud Dataproc region, the - service will pick a zone in the corresponding Compute Engine - region. On a get request, zone will always be present. A full - URL, partial URI, or short name are valid. Examples: - ``htt - ps://www.googleapis.com/compute/v1/projects/[project_id]/zones - /[zone]`` - ``projects/[project_id]/zones/[zone]`` - ``us- - central1-f`` - network_uri: - Optional. The Compute Engine network to be used for machine - communications. Cannot be specified with subnetwork\_uri. If - neither ``network_uri`` nor ``subnetwork_uri`` is specified, - the "default" network of the project is used, if it exists. - Cannot be a "Custom Subnet Network" (see `Using Subnetworks - `__ for more information). A full - URL, partial URI, or short name are valid. Examples: - ``htt - ps://www.googleapis.com/compute/v1/projects/[project_id]/regio - ns/global/default`` - - ``projects/[project_id]/regions/global/default`` - - ``default`` - subnetwork_uri: - Optional. The Compute Engine subnetwork to be used for machine - communications. Cannot be specified with network\_uri. A full - URL, partial URI, or short name are valid. Examples: - ``htt - ps://www.googleapis.com/compute/v1/projects/[project_id]/regio - ns/us-east1/subnetworks/sub0`` - - ``projects/[project_id]/regions/us-east1/subnetworks/sub0`` - - ``sub0`` - internal_ip_only: - Optional. If true, all instances in the cluster will only have - internal IP addresses. By default, clusters are not restricted - to internal IP addresses, and will have ephemeral external IP - addresses assigned to each instance. This ``internal_ip_only`` - restriction can only be enabled for subnetwork enabled - networks, and all off-cluster dependencies must be configured - to be accessible without external IP addresses. - service_account: - Optional. The service account of the instances. Defaults to - the default Compute Engine service account. Custom service - accounts need permissions equivalent to the following IAM - roles: - roles/logging.logWriter - - roles/storage.objectAdmin (see - https://cloud.google.com/compute/docs/access/service- - accounts#custom\_service\_accounts for more information). - Example: ``[account_id]@[project_id].iam.gserviceaccount.com`` - service_account_scopes: - Optional. The URIs of service account scopes to be included in - Compute Engine instances. 
The following base set of scopes is - always included: - - https://www.googleapis.com/auth/cloud.useraccounts.readonly - - https://www.googleapis.com/auth/devstorage.read\_write - - https://www.googleapis.com/auth/logging.write If no scopes - are specified, the following defaults are also provided: - - https://www.googleapis.com/auth/bigquery - - https://www.googleapis.com/auth/bigtable.admin.table - - https://www.googleapis.com/auth/bigtable.data - - https://www.googleapis.com/auth/devstorage.full\_control - tags: - The Compute Engine tags to add to all instances (see `Tagging - instances `__). - metadata: - The Compute Engine metadata entries to add to all instances - (see `Project and instance metadata - `__). - reservation_affinity: - Optional. Reservation Affinity for consuming Zonal - reservation. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.GceClusterConfig) - ), -) -_sym_db.RegisterMessage(GceClusterConfig) -_sym_db.RegisterMessage(GceClusterConfig.MetadataEntry) - -InstanceGroupConfig = _reflection.GeneratedProtocolMessageType( - "InstanceGroupConfig", - (_message.Message,), - dict( - DESCRIPTOR=_INSTANCEGROUPCONFIG, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""The config settings for Compute Engine resources in an - instance group, such as a master or worker group. - - - Attributes: - num_instances: - Optional. The number of VM instances in the instance group. - For master instance groups, must be set to 1. - instance_names: - Output only. The list of instance names. Cloud Dataproc - derives the names from ``cluster_name``, ``num_instances``, - and the instance group. - image_uri: - Optional. The Compute Engine image resource used for cluster - instances. It can be specified or may be inferred from - ``SoftwareConfig.image_version``. - machine_type_uri: - Optional. The Compute Engine machine type used for cluster - instances. A full URL, partial URI, or short name are valid. - Examples: - ``https://www.googleapis.com/compute/v1/projects - /[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`` - - ``projects/[project_id]/zones/us- - east1-a/machineTypes/n1-standard-2`` - ``n1-standard-2`` - **Auto Zone Exception**: If you are using the Cloud Dataproc - `Auto Zone Placement `__ feature, you - must use the short name of the machine type resource, for - example, ``n1-standard-2``. - disk_config: - Optional. Disk option config settings. - is_preemptible: - Optional. Specifies that this instance group contains - preemptible instances. - managed_group_config: - Output only. The config for Compute Engine Instance Group - Manager that manages this group. This is only used for - preemptible instance groups. - accelerators: - Optional. The Compute Engine accelerator configuration for - these instances. - min_cpu_platform: - Specifies the minimum cpu platform for the Instance Group. See - [Cloud Dataproc→Minimum CPU Platform] - (/dataproc/docs/concepts/compute/dataproc-min-cpu). - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.InstanceGroupConfig) - ), -) -_sym_db.RegisterMessage(InstanceGroupConfig) - -ManagedGroupConfig = _reflection.GeneratedProtocolMessageType( - "ManagedGroupConfig", - (_message.Message,), - dict( - DESCRIPTOR=_MANAGEDGROUPCONFIG, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Specifies the resources used to actively manage an - instance group. - - - Attributes: - instance_template_name: - Output only. 
The name of the Instance Template used for the - Managed Instance Group. - instance_group_manager_name: - Output only. The name of the Instance Group Manager for this - group. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ManagedGroupConfig) - ), -) -_sym_db.RegisterMessage(ManagedGroupConfig) - -AcceleratorConfig = _reflection.GeneratedProtocolMessageType( - "AcceleratorConfig", - (_message.Message,), - dict( - DESCRIPTOR=_ACCELERATORCONFIG, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Specifies the type and number of accelerator cards - attached to the instances of an instance group (see `GPUs on Compute - Engine `__). - - - Attributes: - accelerator_type_uri: - Full URL, partial URI, or short name of the accelerator type - resource to expose to this instance. See `Compute Engine - AcceleratorTypes - `__ Examples - \* ``https://www.googleapis.com/compute/beta/projects/[project - _id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`` \* - ``projects/[project_id]/zones/us- - east1-a/acceleratorTypes/nvidia-tesla-k80`` \* ``nvidia- - tesla-k80`` **Auto Zone Exception**: If you are using the - Cloud Dataproc `Auto Zone Placement - `__ feature, you must use the - short name of the accelerator type resource, for example, - ``nvidia-tesla-k80``. - accelerator_count: - The number of the accelerator cards of this type exposed to - this instance. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.AcceleratorConfig) - ), -) -_sym_db.RegisterMessage(AcceleratorConfig) - -DiskConfig = _reflection.GeneratedProtocolMessageType( - "DiskConfig", - (_message.Message,), - dict( - DESCRIPTOR=_DISKCONFIG, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Specifies the config of disk options for a group of VM - instances. - - - Attributes: - boot_disk_type: - Optional. Type of the boot disk (default is "pd-standard"). - Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or - "pd-standard" (Persistent Disk Hard Disk Drive). - boot_disk_size_gb: - Optional. Size in GB of the boot disk (default is 500GB). - num_local_ssds: - Number of attached SSDs, from 0 to 4 (default is 0). If SSDs - are not attached, the boot disk is used to store runtime logs - and `HDFS `__ data. If one or more SSDs are attached, this - runtime bulk data is spread across them, and the boot disk - contains only basic config and installed binaries. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.DiskConfig) - ), -) -_sym_db.RegisterMessage(DiskConfig) - -LifecycleConfig = _reflection.GeneratedProtocolMessageType( - "LifecycleConfig", - (_message.Message,), - dict( - DESCRIPTOR=_LIFECYCLECONFIG, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Specifies the cluster auto-delete schedule configuration. - - - Attributes: - idle_delete_ttl: - Optional. The duration to keep the cluster alive while idling. - Passing this threshold will cause the cluster to be deleted. - Valid range: **[10m, 14d]**. Example: **"10m"**, the minimum - value, to delete the cluster when it has had no jobs running - for 10 minutes. - ttl: - Either the exact time the cluster should be deleted at or the - cluster maximum age. - auto_delete_time: - Optional. The time when cluster will be auto-deleted. - auto_delete_ttl: - Optional. The lifetime duration of cluster. The cluster will - be auto-deleted at the end of this period. Valid range: - **[10m, 14d]**. 
Example: **"1d"**, to delete the cluster 1 - day after its creation.. - idle_start_time: - Output only. The time when cluster became idle (most recent - job finished) and became eligible for deletion due to - idleness. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.LifecycleConfig) - ), -) -_sym_db.RegisterMessage(LifecycleConfig) - -SecurityConfig = _reflection.GeneratedProtocolMessageType( - "SecurityConfig", - (_message.Message,), - dict( - DESCRIPTOR=_SECURITYCONFIG, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Security related configuration, including encryption, - Kerberos, etc. - - - Attributes: - kerberos_config: - Kerberos related configuration. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SecurityConfig) - ), -) -_sym_db.RegisterMessage(SecurityConfig) - -KerberosConfig = _reflection.GeneratedProtocolMessageType( - "KerberosConfig", - (_message.Message,), - dict( - DESCRIPTOR=_KERBEROSCONFIG, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Specifies Kerberos related configuration. - - - Attributes: - enable_kerberos: - Optional. Flag to indicate whether to Kerberize the cluster. - root_principal_password_uri: - Required. The Cloud Storage URI of a KMS encrypted file - containing the root principal password. - kms_key_uri: - Required. The uri of the KMS key used to encrypt various - sensitive files. - keystore_uri: - Optional. The Cloud Storage URI of the keystore file used for - SSL encryption. If not provided, Dataproc will provide a self- - signed certificate. - truststore_uri: - Optional. The Cloud Storage URI of the truststore file used - for SSL encryption. If not provided, Dataproc will provide a - self-signed certificate. - keystore_password_uri: - Optional. The Cloud Storage URI of a KMS encrypted file - containing the password to the user provided keystore. For the - self-signed certificate, this password is generated by - Dataproc. - key_password_uri: - Optional. The Cloud Storage URI of a KMS encrypted file - containing the password to the user provided key. For the - self-signed certificate, this password is generated by - Dataproc. - truststore_password_uri: - Optional. The Cloud Storage URI of a KMS encrypted file - containing the password to the user provided truststore. For - the self-signed certificate, this password is generated by - Dataproc. - cross_realm_trust_realm: - Optional. The remote realm the Dataproc on-cluster KDC will - trust, should the user enable cross realm trust. - cross_realm_trust_kdc: - Optional. The KDC (IP or hostname) for the remote trusted - realm in a cross realm trust relationship. - cross_realm_trust_admin_server: - Optional. The admin server (IP or hostname) for the remote - trusted realm in a cross realm trust relationship. - cross_realm_trust_shared_password_uri: - Optional. The Cloud Storage URI of a KMS encrypted file - containing the shared password between the on-cluster Kerberos - realm and the remote trusted realm, in a cross realm trust - relationship. - kdc_db_key_uri: - Optional. The Cloud Storage URI of a KMS encrypted file - containing the master key of the KDC database. - tgt_lifetime_hours: - Optional. The lifetime of the ticket granting ticket, in - hours. If not specified, or user specifies 0, then default - value 10 will be used. - realm: - Optional. The name of the on-cluster Kerberos realm. If not - specified, the uppercased domain of hostnames will be the - realm. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.KerberosConfig) - ), -) -_sym_db.RegisterMessage(KerberosConfig) - -NodeInitializationAction = _reflection.GeneratedProtocolMessageType( - "NodeInitializationAction", - (_message.Message,), - dict( - DESCRIPTOR=_NODEINITIALIZATIONACTION, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Specifies an executable to run on a fully configured node - and a timeout period for executable completion. - - - Attributes: - executable_file: - Required. Cloud Storage URI of executable file. - execution_timeout: - Optional. Amount of time executable has to complete. Default - is 10 minutes. Cluster creation fails with an explanatory - error message (the name of the executable that caused the - error and the exceeded timeout period) if the executable is - not completed at end of the timeout period. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.NodeInitializationAction) - ), -) -_sym_db.RegisterMessage(NodeInitializationAction) - -ClusterStatus = _reflection.GeneratedProtocolMessageType( - "ClusterStatus", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTERSTATUS, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""The status of a cluster and its instances. - - - Attributes: - state: - Output only. The cluster's state. - detail: - Output only. Optional details of cluster's state. - state_start_time: - Output only. Time when this state was entered. - substate: - Output only. Additional state information that includes status - reported by the agent. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ClusterStatus) - ), -) -_sym_db.RegisterMessage(ClusterStatus) - -SoftwareConfig = _reflection.GeneratedProtocolMessageType( - "SoftwareConfig", - (_message.Message,), - dict( - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_SOFTWARECONFIG_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SoftwareConfig.PropertiesEntry) - ), - ), - DESCRIPTOR=_SOFTWARECONFIG, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Specifies the selection and config of software inside the - cluster. - - - Attributes: - image_version: - Optional. The version of software inside the cluster. It must - be one of the supported `Cloud Dataproc Versions - `__, such as "1.2" - (including a subminor version, such as "1.2.29"), or the - `"preview" version - `__. If unspecified, it defaults to - the latest Debian version. - properties: - Optional. The properties to set on daemon config files. - Property keys are specified in ``prefix:property`` format, for - example ``core:hadoop.tmp.dir``. The following are supported - prefixes and their mappings: - capacity-scheduler: - ``capacity-scheduler.xml`` - core: ``core-site.xml`` - - distcp: ``distcp-default.xml`` - hdfs: ``hdfs-site.xml`` - - hive: ``hive-site.xml`` - mapred: ``mapred-site.xml`` - pig: - ``pig.properties`` - spark: ``spark-defaults.conf`` - yarn: - ``yarn-site.xml`` For more information, see `Cluster - properties `__. - optional_components: - The set of optional components to activate on the cluster. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SoftwareConfig) - ), -) -_sym_db.RegisterMessage(SoftwareConfig) -_sym_db.RegisterMessage(SoftwareConfig.PropertiesEntry) - -ClusterMetrics = _reflection.GeneratedProtocolMessageType( - "ClusterMetrics", - (_message.Message,), - dict( - HdfsMetricsEntry=_reflection.GeneratedProtocolMessageType( - "HdfsMetricsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTERMETRICS_HDFSMETRICSENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ClusterMetrics.HdfsMetricsEntry) - ), - ), - YarnMetricsEntry=_reflection.GeneratedProtocolMessageType( - "YarnMetricsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTERMETRICS_YARNMETRICSENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ClusterMetrics.YarnMetricsEntry) - ), - ), - DESCRIPTOR=_CLUSTERMETRICS, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Contains cluster daemon metrics, such as HDFS and YARN - stats. - - **Beta Feature**: This report is available for testing purposes only. It - may be changed before final release. - - - Attributes: - hdfs_metrics: - The HDFS metrics. - yarn_metrics: - The YARN metrics. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ClusterMetrics) - ), -) -_sym_db.RegisterMessage(ClusterMetrics) -_sym_db.RegisterMessage(ClusterMetrics.HdfsMetricsEntry) -_sym_db.RegisterMessage(ClusterMetrics.YarnMetricsEntry) - -CreateClusterRequest = _reflection.GeneratedProtocolMessageType( - "CreateClusterRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATECLUSTERREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""A request to create a cluster. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - cluster belongs to. - region: - Required. The Cloud Dataproc region in which to handle the - request. - cluster: - Required. The cluster to create. - request_id: - Optional. A unique id used to identify the request. If the - server receives two [CreateClusterRequest][google.cloud.datapr - oc.v1beta2.CreateClusterRequest] requests with the same id, - then the second request will be ignored and the first - [google.longrunning.Operation][google.longrunning.Operation] - created and stored in the backend is returned. It is - recommended to always set this value to a `UUID `__. The id - must contain only letters (a-z, A-Z), numbers (0-9), - underscores (\_), and hyphens (-). The maximum length is 40 - characters. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.CreateClusterRequest) - ), -) -_sym_db.RegisterMessage(CreateClusterRequest) - -UpdateClusterRequest = _reflection.GeneratedProtocolMessageType( - "UpdateClusterRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATECLUSTERREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""A request to update a cluster. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project the - cluster belongs to. - region: - Required. The Cloud Dataproc region in which to handle the - request. - cluster_name: - Required. The cluster name. - cluster: - Required. The changes to the cluster. - graceful_decommission_timeout: - Optional. Timeout for graceful YARN decomissioning. 
Graceful decommissioning allows removing nodes from the cluster without
          interrupting jobs in progress. Timeout specifies how long to wait for
          jobs in progress to finish before forcefully removing nodes (and
          potentially interrupting jobs). Default timeout is 0 (for forceful
          decommission), and the maximum allowed timeout is 1 day. Only
          supported on Dataproc image versions 1.2 and higher.
      update_mask:
          Required. Specifies the path, relative to ``Cluster``, of the field
          to update. For example, to change the number of workers in a cluster
          to 5, the ``update_mask`` parameter would be specified as
          ``config.worker_config.num_instances``, and the ``PATCH`` request
          body would specify the new value, as follows: ::

              { "config": { "workerConfig": { "numInstances": "5" } } }

          Similarly, to change the number of preemptible workers in a cluster
          to 5, the ``update_mask`` parameter would be
          ``config.secondary_worker_config.num_instances``, and the ``PATCH``
          request body would be set as follows: ::

              { "config": { "secondaryWorkerConfig": { "numInstances": "5" } } }

          Note: currently only the following fields (mask: purpose) can be updated:

          - ``labels``: Updates labels
          - ``config.worker_config.num_instances``: Resize primary worker group
          - ``config.secondary_worker_config.num_instances``: Resize secondary worker group
          - ``config.lifecycle_config.auto_delete_ttl``: Reset MAX TTL duration
          - ``config.lifecycle_config.auto_delete_time``: Update MAX TTL deletion timestamp
          - ``config.lifecycle_config.idle_delete_ttl``: Update Idle TTL duration
          - ``config.autoscaling_config.policy_uri``: Use, stop using, or change autoscaling policies
- request_id: - Optional. A unique id used to identify the request. If the - server receives two [UpdateClusterRequest][google.cloud.datapr - oc.v1beta2.UpdateClusterRequest] requests with the same id, - then the second request will be ignored and the first - [google.longrunning.Operation][google.longrunning.Operation] - created and stored in the backend is returned. It is - recommended to always set this value to a `UUID `__. The id - must contain only letters (a-z, A-Z), numbers (0-9), - underscores (\_), and hyphens (-). The maximum length is 40 - characters. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.UpdateClusterRequest) - ), -) -_sym_db.RegisterMessage(UpdateClusterRequest) - -DeleteClusterRequest = _reflection.GeneratedProtocolMessageType( - "DeleteClusterRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETECLUSTERREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""A request to delete a cluster. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - cluster belongs to. - region: - Required. The Cloud Dataproc region in which to handle the - request. - cluster_name: - Required. The cluster name. - cluster_uuid: - Optional. Specifying the ``cluster_uuid`` means the RPC should - fail (with error NOT\_FOUND) if cluster with specified UUID - does not exist. - request_id: - Optional. A unique id used to identify the request. If the - server receives two [DeleteClusterRequest][google.cloud.datapr - oc.v1beta2.DeleteClusterRequest] requests with the same id, - then the second request will be ignored and the first - [google.longrunning.Operation][google.longrunning.Operation] - created and stored in the backend is returned. It is - recommended to always set this value to a `UUID `__. The id - must contain only letters (a-z, A-Z), numbers (0-9), - underscores (\_), and hyphens (-). The maximum length is 40 - characters. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.DeleteClusterRequest) - ), -) -_sym_db.RegisterMessage(DeleteClusterRequest) - -GetClusterRequest = _reflection.GeneratedProtocolMessageType( - "GetClusterRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETCLUSTERREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Request to get the resource representation for a cluster - in a project. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - cluster belongs to. - region: - Required. The Cloud Dataproc region in which to handle the - request. - cluster_name: - Required. The cluster name. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.GetClusterRequest) - ), -) -_sym_db.RegisterMessage(GetClusterRequest) - -ListClustersRequest = _reflection.GeneratedProtocolMessageType( - "ListClustersRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTCLUSTERSREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""A request to list the clusters in a project. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - cluster belongs to. - region: - Required. The Cloud Dataproc region in which to handle the - request. - filter: - Optional. A filter constraining the clusters to list. Filters - are case-sensitive and have the following syntax: field = - value [AND [field = value]] ... 
where **field** is one of - ``status.state``, ``clusterName``, or ``labels.[KEY]``, and - ``[KEY]`` is a label key. **value** can be ``*`` to match all - values. ``status.state`` can be one of the following: - ``ACTIVE``, ``INACTIVE``, ``CREATING``, ``RUNNING``, - ``ERROR``, ``DELETING``, or ``UPDATING``. ``ACTIVE`` contains - the ``CREATING``, ``UPDATING``, and ``RUNNING`` states. - ``INACTIVE`` contains the ``DELETING`` and ``ERROR`` states. - ``clusterName`` is the name of the cluster provided at - creation time. Only the logical ``AND`` operator is supported; - space-separated items are treated as having an implicit - ``AND`` operator. Example filter: status.state = ACTIVE AND - clusterName = mycluster AND labels.env = staging AND - labels.starred = \* - page_size: - Optional. The standard List page size. - page_token: - Optional. The standard List page token. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ListClustersRequest) - ), -) -_sym_db.RegisterMessage(ListClustersRequest) - -ListClustersResponse = _reflection.GeneratedProtocolMessageType( - "ListClustersResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTCLUSTERSRESPONSE, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""The list of all clusters in a project. - - - Attributes: - clusters: - Output only. The clusters in the project. - next_page_token: - Output only. This token is included in the response if there - are more results to fetch. To fetch additional results, - provide this value as the ``page_token`` in a subsequent - ListClustersRequest. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ListClustersResponse) - ), -) -_sym_db.RegisterMessage(ListClustersResponse) - -DiagnoseClusterRequest = _reflection.GeneratedProtocolMessageType( - "DiagnoseClusterRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DIAGNOSECLUSTERREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""A request to collect cluster diagnostic information. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - cluster belongs to. - region: - Required. The Cloud Dataproc region in which to handle the - request. - cluster_name: - Required. The cluster name. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.DiagnoseClusterRequest) - ), -) -_sym_db.RegisterMessage(DiagnoseClusterRequest) - -DiagnoseClusterResults = _reflection.GeneratedProtocolMessageType( - "DiagnoseClusterResults", - (_message.Message,), - dict( - DESCRIPTOR=_DIAGNOSECLUSTERRESULTS, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""The location of diagnostic output. - - - Attributes: - output_uri: - Output only. The Cloud Storage URI of the diagnostic output. - The output report is a plain text file with a summary of - collected diagnostics. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.DiagnoseClusterResults) - ), -) -_sym_db.RegisterMessage(DiagnoseClusterResults) - -ReservationAffinity = _reflection.GeneratedProtocolMessageType( - "ReservationAffinity", - (_message.Message,), - dict( - DESCRIPTOR=_RESERVATIONAFFINITY, - __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Reservation Affinity for consuming Zonal reservation. - - - Attributes: - consume_reservation_type: - Optional. Type of reservation to consume - key: - Optional. Corresponds to the label key of reservation - resource. 
- values: - Optional. Corresponds to the label values of reservation - resource. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ReservationAffinity) - ), -) -_sym_db.RegisterMessage(ReservationAffinity) - - -DESCRIPTOR._options = None -_CLUSTER_LABELSENTRY._options = None -_CLUSTER.fields_by_name["project_id"]._options = None -_CLUSTER.fields_by_name["cluster_name"]._options = None -_CLUSTER.fields_by_name["config"]._options = None -_CLUSTER.fields_by_name["labels"]._options = None -_CLUSTER.fields_by_name["status"]._options = None -_CLUSTER.fields_by_name["status_history"]._options = None -_CLUSTER.fields_by_name["cluster_uuid"]._options = None -_CLUSTER.fields_by_name["metrics"]._options = None -_CLUSTERCONFIG.fields_by_name["config_bucket"]._options = None -_CLUSTERCONFIG.fields_by_name["gce_cluster_config"]._options = None -_CLUSTERCONFIG.fields_by_name["master_config"]._options = None -_CLUSTERCONFIG.fields_by_name["worker_config"]._options = None -_CLUSTERCONFIG.fields_by_name["secondary_worker_config"]._options = None -_CLUSTERCONFIG.fields_by_name["software_config"]._options = None -_CLUSTERCONFIG.fields_by_name["lifecycle_config"]._options = None -_CLUSTERCONFIG.fields_by_name["initialization_actions"]._options = None -_CLUSTERCONFIG.fields_by_name["encryption_config"]._options = None -_CLUSTERCONFIG.fields_by_name["autoscaling_config"]._options = None -_CLUSTERCONFIG.fields_by_name["endpoint_config"]._options = None -_CLUSTERCONFIG.fields_by_name["security_config"]._options = None -_ENDPOINTCONFIG_HTTPPORTSENTRY._options = None -_ENDPOINTCONFIG.fields_by_name["http_ports"]._options = None -_ENDPOINTCONFIG.fields_by_name["enable_http_port_access"]._options = None -_AUTOSCALINGCONFIG.fields_by_name["policy_uri"]._options = None -_ENCRYPTIONCONFIG.fields_by_name["gce_pd_kms_key_name"]._options = None -_GCECLUSTERCONFIG_METADATAENTRY._options = None -_GCECLUSTERCONFIG.fields_by_name["zone_uri"]._options = None -_GCECLUSTERCONFIG.fields_by_name["network_uri"]._options = None -_GCECLUSTERCONFIG.fields_by_name["subnetwork_uri"]._options = None -_GCECLUSTERCONFIG.fields_by_name["internal_ip_only"]._options = None -_GCECLUSTERCONFIG.fields_by_name["service_account"]._options = None -_GCECLUSTERCONFIG.fields_by_name["service_account_scopes"]._options = None -_GCECLUSTERCONFIG.fields_by_name["reservation_affinity"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["num_instances"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["instance_names"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["image_uri"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["machine_type_uri"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["disk_config"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["is_preemptible"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["managed_group_config"]._options = None -_INSTANCEGROUPCONFIG.fields_by_name["accelerators"]._options = None -_MANAGEDGROUPCONFIG.fields_by_name["instance_template_name"]._options = None -_MANAGEDGROUPCONFIG.fields_by_name["instance_group_manager_name"]._options = None -_DISKCONFIG.fields_by_name["boot_disk_type"]._options = None -_DISKCONFIG.fields_by_name["boot_disk_size_gb"]._options = None -_LIFECYCLECONFIG.fields_by_name["idle_delete_ttl"]._options = None -_LIFECYCLECONFIG.fields_by_name["idle_start_time"]._options = None -_KERBEROSCONFIG.fields_by_name["enable_kerberos"]._options = None -_KERBEROSCONFIG.fields_by_name["root_principal_password_uri"]._options = None 
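# Illustrative only: a minimal sketch of assembling a CreateClusterRequest from the
# message types documented above (Cluster, ClusterConfig, NodeInitializationAction,
# LifecycleConfig). It is not part of the removed module; the project, bucket, and
# cluster names are hypothetical placeholders.
from google.protobuf import duration_pb2
from google.cloud.dataproc_v1beta2.proto import clusters_pb2

create_request = clusters_pb2.CreateClusterRequest(
    project_id="my-project",                      # hypothetical project ID
    region="us-central1",
    cluster=clusters_pb2.Cluster(
        project_id="my-project",
        cluster_name="example-cluster",           # hypothetical cluster name
        config=clusters_pb2.ClusterConfig(
            # Run an executable on each node after configuration completes.
            initialization_actions=[
                clusters_pb2.NodeInitializationAction(
                    executable_file="gs://my-bucket/init.sh",      # hypothetical URI
                    execution_timeout=duration_pb2.Duration(seconds=600),
                )
            ],
            # Auto-delete the cluster after 10 idle minutes (valid range [10m, 14d]).
            lifecycle_config=clusters_pb2.LifecycleConfig(
                idle_delete_ttl=duration_pb2.Duration(seconds=600)
            ),
        ),
    ),
)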
-_KERBEROSCONFIG.fields_by_name["kms_key_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["keystore_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["truststore_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["keystore_password_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["key_password_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["truststore_password_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["cross_realm_trust_realm"]._options = None -_KERBEROSCONFIG.fields_by_name["cross_realm_trust_kdc"]._options = None -_KERBEROSCONFIG.fields_by_name["cross_realm_trust_admin_server"]._options = None -_KERBEROSCONFIG.fields_by_name["cross_realm_trust_shared_password_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["kdc_db_key_uri"]._options = None -_KERBEROSCONFIG.fields_by_name["tgt_lifetime_hours"]._options = None -_KERBEROSCONFIG.fields_by_name["realm"]._options = None -_NODEINITIALIZATIONACTION.fields_by_name["executable_file"]._options = None -_NODEINITIALIZATIONACTION.fields_by_name["execution_timeout"]._options = None -_CLUSTERSTATUS.fields_by_name["state"]._options = None -_CLUSTERSTATUS.fields_by_name["detail"]._options = None -_CLUSTERSTATUS.fields_by_name["state_start_time"]._options = None -_CLUSTERSTATUS.fields_by_name["substate"]._options = None -_SOFTWARECONFIG_PROPERTIESENTRY._options = None -_SOFTWARECONFIG.fields_by_name["image_version"]._options = None -_SOFTWARECONFIG.fields_by_name["properties"]._options = None -_CLUSTERMETRICS_HDFSMETRICSENTRY._options = None -_CLUSTERMETRICS_YARNMETRICSENTRY._options = None -_CREATECLUSTERREQUEST.fields_by_name["project_id"]._options = None -_CREATECLUSTERREQUEST.fields_by_name["region"]._options = None -_CREATECLUSTERREQUEST.fields_by_name["cluster"]._options = None -_CREATECLUSTERREQUEST.fields_by_name["request_id"]._options = None -_UPDATECLUSTERREQUEST.fields_by_name["project_id"]._options = None -_UPDATECLUSTERREQUEST.fields_by_name["region"]._options = None -_UPDATECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None -_UPDATECLUSTERREQUEST.fields_by_name["cluster"]._options = None -_UPDATECLUSTERREQUEST.fields_by_name["graceful_decommission_timeout"]._options = None -_UPDATECLUSTERREQUEST.fields_by_name["update_mask"]._options = None -_UPDATECLUSTERREQUEST.fields_by_name["request_id"]._options = None -_DELETECLUSTERREQUEST.fields_by_name["project_id"]._options = None -_DELETECLUSTERREQUEST.fields_by_name["region"]._options = None -_DELETECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None -_DELETECLUSTERREQUEST.fields_by_name["cluster_uuid"]._options = None -_DELETECLUSTERREQUEST.fields_by_name["request_id"]._options = None -_GETCLUSTERREQUEST.fields_by_name["project_id"]._options = None -_GETCLUSTERREQUEST.fields_by_name["region"]._options = None -_GETCLUSTERREQUEST.fields_by_name["cluster_name"]._options = None -_LISTCLUSTERSREQUEST.fields_by_name["project_id"]._options = None -_LISTCLUSTERSREQUEST.fields_by_name["region"]._options = None -_LISTCLUSTERSREQUEST.fields_by_name["filter"]._options = None -_LISTCLUSTERSREQUEST.fields_by_name["page_size"]._options = None -_LISTCLUSTERSREQUEST.fields_by_name["page_token"]._options = None -_LISTCLUSTERSRESPONSE.fields_by_name["clusters"]._options = None -_LISTCLUSTERSRESPONSE.fields_by_name["next_page_token"]._options = None -_DIAGNOSECLUSTERREQUEST.fields_by_name["project_id"]._options = None -_DIAGNOSECLUSTERREQUEST.fields_by_name["region"]._options = None -_DIAGNOSECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None 
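# Illustrative only: a minimal sketch tying the ``update_mask`` table and the
# ListClusters filter grammar documented above to concrete request messages.
# It is not part of the removed module; names are hypothetical placeholders.
from google.protobuf import field_mask_pb2
from google.cloud.dataproc_v1beta2.proto import clusters_pb2

# Resize the primary worker group to 5 instances; only the masked path is applied.
update_request = clusters_pb2.UpdateClusterRequest(
    project_id="my-project",
    region="us-central1",
    cluster_name="example-cluster",
    cluster=clusters_pb2.Cluster(
        config=clusters_pb2.ClusterConfig(
            worker_config=clusters_pb2.InstanceGroupConfig(num_instances=5)
        )
    ),
    update_mask=field_mask_pb2.FieldMask(
        paths=["config.worker_config.num_instances"]
    ),
)

# List only active clusters carrying a given label, per the filter syntax above.
list_request = clusters_pb2.ListClustersRequest(
    project_id="my-project",
    region="us-central1",
    filter="status.state = ACTIVE AND labels.env = staging",
)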
-_DIAGNOSECLUSTERRESULTS.fields_by_name["output_uri"]._options = None -_RESERVATIONAFFINITY.fields_by_name["consume_reservation_type"]._options = None -_RESERVATIONAFFINITY.fields_by_name["key"]._options = None -_RESERVATIONAFFINITY.fields_by_name["values"]._options = None - -_CLUSTERCONTROLLER = _descriptor.ServiceDescriptor( - name="ClusterController", - full_name="google.cloud.dataproc.v1beta2.ClusterController", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" - ), - serialized_start=6587, - serialized_end=8354, - methods=[ - _descriptor.MethodDescriptor( - name="CreateCluster", - full_name="google.cloud.dataproc.v1beta2.ClusterController.CreateCluster", - index=0, - containing_service=None, - input_type=_CREATECLUSTERREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - '\202\323\344\223\002C"8/v1beta2/projects/{project_id}/regions/{region}/clusters:\007cluster\332A\033project_id, region, cluster\312AA\n\007Cluster\0226google.cloud.dataproc.v1beta2.ClusterOperationMetadata' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateCluster", - full_name="google.cloud.dataproc.v1beta2.ClusterController.UpdateCluster", - index=1, - containing_service=None, - input_type=_UPDATECLUSTERREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - "\202\323\344\223\002R2G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\007cluster\332A6project_id, region, cluster_name, cluster, update_mask\312AA\n\007Cluster\0226google.cloud.dataproc.v1beta2.ClusterOperationMetadata" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteCluster", - full_name="google.cloud.dataproc.v1beta2.ClusterController.DeleteCluster", - index=2, - containing_service=None, - input_type=_DELETECLUSTERREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - "\202\323\344\223\002I*G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\332A project_id, region, cluster_name\312AO\n\025google.protobuf.Empty\0226google.cloud.dataproc.v1beta2.ClusterOperationMetadata" - ), - ), - _descriptor.MethodDescriptor( - name="GetCluster", - full_name="google.cloud.dataproc.v1beta2.ClusterController.GetCluster", - index=3, - containing_service=None, - input_type=_GETCLUSTERREQUEST, - output_type=_CLUSTER, - serialized_options=_b( - "\202\323\344\223\002I\022G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\332A project_id, region, cluster_name" - ), - ), - _descriptor.MethodDescriptor( - name="ListClusters", - full_name="google.cloud.dataproc.v1beta2.ClusterController.ListClusters", - index=4, - containing_service=None, - input_type=_LISTCLUSTERSREQUEST, - output_type=_LISTCLUSTERSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002:\0228/v1beta2/projects/{project_id}/regions/{region}/clusters\332A\022project_id, region\332A\032project_id, region, filter" - ), - ), - _descriptor.MethodDescriptor( - name="DiagnoseCluster", - full_name="google.cloud.dataproc.v1beta2.ClusterController.DiagnoseCluster", - index=5, - containing_service=None, - input_type=_DIAGNOSECLUSTERREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - '\202\323\344\223\002U"P/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\001*\332A project_id, region, 
cluster_name\312AO\n\025google.protobuf.Empty\0226google.cloud.dataproc.v1beta2.ClusterOperationMetadata' - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_CLUSTERCONTROLLER) - -DESCRIPTOR.services_by_name["ClusterController"] = _CLUSTERCONTROLLER - -# @@protoc_insertion_point(module_scope) diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py deleted file mode 100644 index de9821404290..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py +++ /dev/null @@ -1,151 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.dataproc_v1beta2.proto import ( - clusters_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) - - -class ClusterControllerStub(object): - """The ClusterControllerService provides methods to manage clusters - of Compute Engine instances. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.CreateCluster = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.ClusterController/CreateCluster", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.CreateClusterRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.UpdateCluster = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.ClusterController/UpdateCluster", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.UpdateClusterRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.DeleteCluster = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.ClusterController/DeleteCluster", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.DeleteClusterRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.GetCluster = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.ClusterController/GetCluster", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.GetClusterRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.Cluster.FromString, - ) - self.ListClusters = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.ClusterController/ListClusters", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.ListClustersRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.ListClustersResponse.FromString, - ) - self.DiagnoseCluster = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.ClusterController/DiagnoseCluster", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.DiagnoseClusterRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - - -class ClusterControllerServicer(object): - """The ClusterControllerService provides methods to manage clusters - of Compute Engine instances. - """ - - def CreateCluster(self, request, context): - """Creates a cluster in a project. 
The returned - [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateCluster(self, request, context): - """Updates a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteCluster(self, request, context): - """Deletes a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetCluster(self, request, context): - """Gets the resource representation for a cluster in a project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListClusters(self, request, context): - """Lists all regions/{region}/clusters in a project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DiagnoseCluster(self, request, context): - """Gets cluster diagnostic information. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). - After the operation completes, - [Operation.response][google.longrunning.Operation.response] - contains - [Empty](google.protobuf.Empty). 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_ClusterControllerServicer_to_server(servicer, server): - rpc_method_handlers = { - "CreateCluster": grpc.unary_unary_rpc_method_handler( - servicer.CreateCluster, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.CreateClusterRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "UpdateCluster": grpc.unary_unary_rpc_method_handler( - servicer.UpdateCluster, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.UpdateClusterRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "DeleteCluster": grpc.unary_unary_rpc_method_handler( - servicer.DeleteCluster, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.DeleteClusterRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "GetCluster": grpc.unary_unary_rpc_method_handler( - servicer.GetCluster, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.GetClusterRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.Cluster.SerializeToString, - ), - "ListClusters": grpc.unary_unary_rpc_method_handler( - servicer.ListClusters, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.ListClustersRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.ListClustersResponse.SerializeToString, - ), - "DiagnoseCluster": grpc.unary_unary_rpc_method_handler( - servicer.DiagnoseCluster, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.DiagnoseClusterRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.dataproc.v1beta2.ClusterController", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/jobs.proto b/dataproc/google/cloud/dataproc_v1beta2/proto/jobs.proto deleted file mode 100644 index c1e643c92fd1..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/jobs.proto +++ /dev/null @@ -1,829 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -syntax = "proto3"; - -package google.cloud.dataproc.v1beta2; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "JobsProto"; -option java_package = "com.google.cloud.dataproc.v1beta2"; - -// The JobController provides methods to manage jobs. -service JobController { - option (google.api.default_host) = "dataproc.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Submits a job to a cluster. - rpc SubmitJob(SubmitJobRequest) returns (Job) { - option (google.api.http) = { - post: "/v1beta2/projects/{project_id}/regions/{region}/jobs:submit" - body: "*" - }; - option (google.api.method_signature) = "project_id, region, job"; - } - - // Gets the resource representation for a job in a project. - rpc GetJob(GetJobRequest) returns (Job) { - option (google.api.http) = { - get: "/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}" - }; - option (google.api.method_signature) = "project_id, region, job_id"; - } - - // Lists regions/{region}/jobs in a project. - rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) { - option (google.api.http) = { - get: "/v1beta2/projects/{project_id}/regions/{region}/jobs" - }; - option (google.api.method_signature) = "project_id, region"; - option (google.api.method_signature) = "project_id, region, filter"; - } - - // Updates a job in a project. - rpc UpdateJob(UpdateJobRequest) returns (Job) { - option (google.api.http) = { - patch: "/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}" - body: "job" - }; - } - - // Starts a job cancellation request. To access the job resource - // after cancellation, call - // [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list) - // or - // [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get). - rpc CancelJob(CancelJobRequest) returns (Job) { - option (google.api.http) = { - post: "/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel" - body: "*" - }; - option (google.api.method_signature) = "project_id, region, job_id"; - } - - // Deletes the job from the project. If the job is active, the delete fails, - // and the response returns `FAILED_PRECONDITION`. - rpc DeleteJob(DeleteJobRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}" - }; - option (google.api.method_signature) = "project_id, region, job_id"; - } -} - -// The runtime logging config of the job. -message LoggingConfig { - // The Log4j level for job execution. When running an - // [Apache Hive](http://hive.apache.org/) job, Cloud - // Dataproc configures the Hive client to an equivalent verbosity level. - enum Level { - // Level is unspecified. Use default level for log4j. - LEVEL_UNSPECIFIED = 0; - - // Use ALL level for log4j. - ALL = 1; - - // Use TRACE level for log4j. - TRACE = 2; - - // Use DEBUG level for log4j. - DEBUG = 3; - - // Use INFO level for log4j. - INFO = 4; - - // Use WARN level for log4j. - WARN = 5; - - // Use ERROR level for log4j. - ERROR = 6; - - // Use FATAL level for log4j. - FATAL = 7; - - // Turn off log4j. 
- OFF = 8; - } - - // The per-package log levels for the driver. This may include - // "root" package name to configure rootLogger. - // Examples: - // 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' - map driver_log_levels = 2; -} - -// A Cloud Dataproc job for running -// [Apache Hadoop -// MapReduce](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html) -// jobs on [Apache Hadoop -// YARN](https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html). -message HadoopJob { - // Required. Indicates the location of the driver's main class. Specify - // either the jar file that contains the main class or the main class name. - // To specify both, add the jar file to `jar_file_uris`, and then specify - // the main class name in this property. - oneof driver { - // The HCFS URI of the jar file containing the main class. - // Examples: - // 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' - // 'hdfs:/tmp/test-samples/custom-wordcount.jar' - // 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar' - string main_jar_file_uri = 1; - - // The name of the driver's main class. The jar file containing the class - // must be in the default CLASSPATH or specified in `jar_file_uris`. - string main_class = 2; - } - - // Optional. The arguments to pass to the driver. Do not - // include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as - // job properties, since a collision may occur that causes an incorrect job - // submission. - repeated string args = 3; - - // Optional. Jar file URIs to add to the CLASSPATHs of the - // Hadoop driver and tasks. - repeated string jar_file_uris = 4; - - // Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied - // to the working directory of Hadoop drivers and distributed tasks. Useful - // for naively parallel tasks. - repeated string file_uris = 5; - - // Optional. HCFS URIs of archives to be extracted in the working directory of - // Hadoop drivers and tasks. Supported file types: - // .jar, .tar, .tar.gz, .tgz, or .zip. - repeated string archive_uris = 6; - - // Optional. A mapping of property names to values, used to configure Hadoop. - // Properties that conflict with values set by the Cloud Dataproc API may be - // overwritten. Can include properties set in /etc/hadoop/conf/*-site and - // classes in user code. - map properties = 7; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8; -} - -// A Cloud Dataproc job for running [Apache Spark](http://spark.apache.org/) -// applications on YARN. -// The specification of the main method to call to drive the job. -// Specify either the jar file that contains the main class or the main class -// name. To pass both a main jar and a main class in that jar, add the jar to -// `CommonJob.jar_file_uris`, and then specify the main class name in -// `main_class`. -message SparkJob { - oneof driver { - // The HCFS URI of the jar file that contains the main class. - string main_jar_file_uri = 1; - - // The name of the driver's main class. The jar file that contains the class - // must be in the default CLASSPATH or specified in `jar_file_uris`. - string main_class = 2; - } - - // Optional. The arguments to pass to the driver. Do not include arguments, - // such as `--conf`, that can be set as job properties, since a collision may - // occur that causes an incorrect job submission. - repeated string args = 3; - - // Optional. 
HCFS URIs of jar files to add to the CLASSPATHs of the - // Spark driver and tasks. - repeated string jar_file_uris = 4; - - // Optional. HCFS URIs of files to be copied to the working directory of - // Spark drivers and distributed tasks. Useful for naively parallel tasks. - repeated string file_uris = 5; - - // Optional. HCFS URIs of archives to be extracted in the working directory - // of Spark drivers and tasks. Supported file types: - // .jar, .tar, .tar.gz, .tgz, and .zip. - repeated string archive_uris = 6; - - // Optional. A mapping of property names to values, used to configure Spark. - // Properties that conflict with values set by the Cloud Dataproc API may be - // overwritten. Can include properties set in - // /etc/spark/conf/spark-defaults.conf and classes in user code. - map properties = 7; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8; -} - -// A Cloud Dataproc job for running -// [Apache -// PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html) -// applications on YARN. -message PySparkJob { - // Required. The HCFS URI of the main Python file to use as the driver. Must - // be a .py file. - string main_python_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The arguments to pass to the driver. Do not include arguments, - // such as `--conf`, that can be set as job properties, since a collision may - // occur that causes an incorrect job submission. - repeated string args = 2; - - // Optional. HCFS file URIs of Python files to pass to the PySpark - // framework. Supported file types: .py, .egg, and .zip. - repeated string python_file_uris = 3; - - // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the - // Python driver and tasks. - repeated string jar_file_uris = 4; - - // Optional. HCFS URIs of files to be copied to the working directory of - // Python drivers and distributed tasks. Useful for naively parallel tasks. - repeated string file_uris = 5; - - // Optional. HCFS URIs of archives to be extracted in the working directory of - // .jar, .tar, .tar.gz, .tgz, and .zip. - repeated string archive_uris = 6; - - // Optional. A mapping of property names to values, used to configure PySpark. - // Properties that conflict with values set by the Cloud Dataproc API may be - // overwritten. Can include properties set in - // /etc/spark/conf/spark-defaults.conf and classes in user code. - map properties = 7; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8; -} - -// A list of queries to run on a cluster. -message QueryList { - // Required. The queries to execute. You do not need to terminate a query - // with a semicolon. Multiple queries can be specified in one string - // by separating each with a semicolon. Here is an example of an Cloud - // Dataproc API snippet that uses a QueryList to specify a HiveJob: - // - // "hiveJob": { - // "queryList": { - // "queries": [ - // "query1", - // "query2", - // "query3;query4", - // ] - // } - // } - repeated string queries = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// A Cloud Dataproc job for running [Apache Hive](https://hive.apache.org/) -// queries on YARN. -message HiveJob { - // Required. The sequence of Hive queries to execute, specified as either - // an HCFS file URI or a list of queries. - oneof queries { - // The HCFS URI of the script that contains Hive queries. - string query_file_uri = 1; - - // A list of queries. - QueryList query_list = 2; - } - - // Optional. 
-  // The default value is `false`. Setting to `true` can be useful when
-  // executing independent parallel queries.
-  bool continue_on_failure = 3;
-
-  // Optional. Mapping of query variable names to values (equivalent to the
-  // Hive command: `SET name="value";`).
-  map<string, string> script_variables = 4;
-
-  // Optional. A mapping of property names and values, used to configure Hive.
-  // Properties that conflict with values set by the Cloud Dataproc API may be
-  // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml,
-  // /etc/hive/conf/hive-site.xml, and classes in user code.
-  map<string, string> properties = 5;
-
-  // Optional. HCFS URIs of jar files to add to the CLASSPATH of the
-  // Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes
-  // and UDFs.
-  repeated string jar_file_uris = 6;
-}
-
-// A Cloud Dataproc job for running [Apache Spark
-// SQL](http://spark.apache.org/sql/) queries.
-message SparkSqlJob {
-  // Required. The sequence of Spark SQL queries to execute, specified as
-  // either an HCFS file URI or as a list of queries.
-  oneof queries {
-    // The HCFS URI of the script that contains SQL queries.
-    string query_file_uri = 1;
-
-    // A list of queries.
-    QueryList query_list = 2;
-  }
-
-  // Optional. Mapping of query variable names to values (equivalent to the
-  // Spark SQL command: SET `name="value";`).
-  map<string, string> script_variables = 3;
-
-  // Optional. A mapping of property names to values, used to configure
-  // Spark SQL's SparkConf. Properties that conflict with values set by the
-  // Cloud Dataproc API may be overwritten.
-  map<string, string> properties = 4;
-
-  // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
-  repeated string jar_file_uris = 56;
-
-  // Optional. The runtime log config for job execution.
-  LoggingConfig logging_config = 6;
-}
-
-// A Cloud Dataproc job for running [Apache Pig](https://pig.apache.org/)
-// queries on YARN.
-message PigJob {
-  // Required. The sequence of Pig queries to execute, specified as an HCFS
-  // file URI or a list of queries.
-  oneof queries {
-    // The HCFS URI of the script that contains the Pig queries.
-    string query_file_uri = 1;
-
-    // A list of queries.
-    QueryList query_list = 2;
-  }
-
-  // Optional. Whether to continue executing queries if a query fails.
-  // The default value is `false`. Setting to `true` can be useful when
-  // executing independent parallel queries.
-  bool continue_on_failure = 3;
-
-  // Optional. Mapping of query variable names to values (equivalent to the Pig
-  // command: `name=[value]`).
-  map<string, string> script_variables = 4;
-
-  // Optional. A mapping of property names to values, used to configure Pig.
-  // Properties that conflict with values set by the Cloud Dataproc API may be
-  // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml,
-  // /etc/pig/conf/pig.properties, and classes in user code.
-  map<string, string> properties = 5;
-
-  // Optional. HCFS URIs of jar files to add to the CLASSPATH of
-  // the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
-  repeated string jar_file_uris = 6;
-
-  // Optional. The runtime log config for job execution.
-  LoggingConfig logging_config = 7;
-}
-
-// A Cloud Dataproc job for running
-// [Apache SparkR](https://spark.apache.org/docs/latest/sparkr.html)
-// applications on YARN.
-message SparkRJob {
-  // Required. The HCFS URI of the main R file to use as the driver.
-  // Must be a .R file.
-  string main_r_file_uri = 1 [(google.api.field_behavior) = REQUIRED];
-
-  // Optional. The arguments to pass to the driver. Do not include arguments,
-  // such as `--conf`, that can be set as job properties, since a collision may
-  // occur that causes an incorrect job submission.
-  repeated string args = 2;
-
-  // Optional. HCFS URIs of files to be copied to the working directory of
-  // R drivers and distributed tasks. Useful for naively parallel tasks.
-  repeated string file_uris = 3;
-
-  // Optional. HCFS URIs of archives to be extracted in the working directory of
-  // Spark drivers and tasks. Supported file types:
-  // .jar, .tar, .tar.gz, .tgz, and .zip.
-  repeated string archive_uris = 4;
-
-  // Optional. A mapping of property names to values, used to configure SparkR.
-  // Properties that conflict with values set by the Cloud Dataproc API may be
-  // overwritten. Can include properties set in
-  // /etc/spark/conf/spark-defaults.conf and classes in user code.
-  map<string, string> properties = 5;
-
-  // Optional. The runtime log config for job execution.
-  LoggingConfig logging_config = 6;
-}
-
-// Cloud Dataproc job config.
-message JobPlacement {
-  // Required. The name of the cluster where the job will be submitted.
-  string cluster_name = 1 [(google.api.field_behavior) = REQUIRED];
-
-  // Output only. A cluster UUID generated by the Cloud Dataproc service when
-  // the job is submitted.
-  string cluster_uuid = 2;
-}
-
-// Cloud Dataproc job status.
-message JobStatus {
-  // The job state.
-  enum State {
-    // The job state is unknown.
-    STATE_UNSPECIFIED = 0;
-
-    // The job is pending; it has been submitted, but is not yet running.
-    PENDING = 1;
-
-    // Job has been received by the service and completed initial setup;
-    // it will soon be submitted to the cluster.
-    SETUP_DONE = 8;
-
-    // The job is running on the cluster.
-    RUNNING = 2;
-
-    // A CancelJob request has been received, but is pending.
-    CANCEL_PENDING = 3;
-
-    // Transient in-flight resources have been canceled, and the request to
-    // cancel the running job has been issued to the cluster.
-    CANCEL_STARTED = 7;
-
-    // The job cancellation was successful.
-    CANCELLED = 4;
-
-    // The job has completed successfully.
-    DONE = 5;
-
-    // The job has completed, but encountered an error.
-    ERROR = 6;
-
-    // Job attempt has failed. The detail field contains failure details for
-    // this attempt.
-    //
-    // Applies to restartable jobs only.
-    ATTEMPT_FAILURE = 9;
-  }
-
-  // The job substate.
-  enum Substate {
-    // The job substate is unknown.
-    UNSPECIFIED = 0;
-
-    // The Job is submitted to the agent.
-    //
-    // Applies to RUNNING state.
-    SUBMITTED = 1;
-
-    // The Job has been received and is awaiting execution (it may be waiting
-    // for a condition to be met). See the "details" field for the reason for
-    // the delay.
-    //
-    // Applies to RUNNING state.
-    QUEUED = 2;
-
-    // The agent-reported status is out of date, which may be caused by a
-    // loss of communication between the agent and Cloud Dataproc. If the
-    // agent does not send a timely update, the job will fail.
-    //
-    // Applies to RUNNING state.
-    STALE_STATUS = 3;
-  }
-
-  // Output only. A state message specifying the overall job state.
-  State state = 1;
-
-  // Output only. Optional job state details, such as an error
-  // description if the state is ERROR.
-  string details = 2;
-
-  // Output only. The time when this state was entered.
-  google.protobuf.Timestamp state_start_time = 6;
-
-  // Output only. Additional state information, which includes
-  // status reported by the agent.
-  Substate substate = 7;
-}
-
-// Encapsulates the full scoping used to reference a job.
-message JobReference {
-  // Required. The ID of the Google Cloud Platform project that the job
-  // belongs to.
-  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
-
-  // Optional. The job ID, which must be unique within the project.
-  //
-  // The ID must contain only letters (a-z, A-Z), numbers (0-9),
-  // underscores (_), or hyphens (-). The maximum length is 100 characters.
-  //
-  // If not specified by the caller, the job ID will be provided by the server.
-  string job_id = 2;
-}
-
-// A YARN application created by a job. Application information is a subset of
-// org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.
-//
-// **Beta Feature**: This report is available for testing purposes only. It may
-// be changed before final release.
-message YarnApplication {
-  // The application state, corresponding to
-  // YarnProtos.YarnApplicationStateProto.
-  enum State {
-    // Status is unspecified.
-    STATE_UNSPECIFIED = 0;
-
-    // Status is NEW.
-    NEW = 1;
-
-    // Status is NEW_SAVING.
-    NEW_SAVING = 2;
-
-    // Status is SUBMITTED.
-    SUBMITTED = 3;
-
-    // Status is ACCEPTED.
-    ACCEPTED = 4;
-
-    // Status is RUNNING.
-    RUNNING = 5;
-
-    // Status is FINISHED.
-    FINISHED = 6;
-
-    // Status is FAILED.
-    FAILED = 7;
-
-    // Status is KILLED.
-    KILLED = 8;
-  }
-
-  // Output only. The application name.
-  string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. The application state.
-  State state = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. The numerical progress of the application, from 1 to 100.
-  float progress = 3 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Optional. Output only. The HTTP URL of the ApplicationMaster, HistoryServer, or
-  // TimelineServer that provides application-specific information. The URL uses
-  // the internal hostname, and requires a proxy server for resolution and,
-  // possibly, access.
-  string tracking_url = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
-}
-
-// A Cloud Dataproc job resource.
-message Job {
-  // Optional. The fully qualified reference to the job, which can be used to
-  // obtain the equivalent REST path of the job resource. If this property
-  // is not specified when a job is created, the server generates a
-  // job_id.
-  JobReference reference = 1;
-
-  // Required. Job information, including how, when, and where to
-  // run the job.
-  JobPlacement placement = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The application/framework-specific portion of the job.
-  oneof type_job {
-    // Job is a Hadoop job.
-    HadoopJob hadoop_job = 3;
-
-    // Job is a Spark job.
-    SparkJob spark_job = 4;
-
-    // Job is a Pyspark job.
-    PySparkJob pyspark_job = 5;
-
-    // Job is a Hive job.
-    HiveJob hive_job = 6;
-
-    // Job is a Pig job.
-    PigJob pig_job = 7;
-
-    // Job is a SparkR job.
-    SparkRJob spark_r_job = 21;
-
-    // Job is a SparkSql job.
-    SparkSqlJob spark_sql_job = 12;
-  }
-
-  // Output only. The job status. Additional application-specific
-  // status information may be contained in the type_job
-  // and yarn_applications fields.
-  JobStatus status = 8;
-
-  // Output only. The previous job status.
-  repeated JobStatus status_history = 13;
-
-  // Output only. The collection of YARN applications spun up by this job.
-  //
-  // **Beta** Feature: This report is available for testing purposes only. It
-  // may be changed before final release.
-  repeated YarnApplication yarn_applications = 9;
-
-  // Output only. The email address of the user submitting the job. For jobs
-  // submitted on the cluster, the address is username@hostname.
-  string submitted_by = 10;
-
-  // Output only. A URI pointing to the location of the stdout of the job's
-  // driver program.
-  string driver_output_resource_uri = 17;
-
-  // Output only. If present, the location of miscellaneous control files
-  // which may be used as part of job setup and handling. If not present,
-  // control files may be placed in the same location as `driver_output_uri`.
-  string driver_control_files_uri = 15;
-
-  // Optional. The labels to associate with this job.
-  // Label **keys** must contain 1 to 63 characters, and must conform to
-  // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
-  // Label **values** may be empty, but, if present, must contain 1 to 63
-  // characters, and must conform to [RFC
-  // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
-  // associated with a job.
-  map<string, string> labels = 18;
-
-  // Optional. Job scheduling configuration.
-  JobScheduling scheduling = 20;
-
-  // Output only. A UUID that uniquely identifies a job within the project
-  // over time. This is in contrast to a user-settable reference.job_id that
-  // may be reused over time.
-  string job_uuid = 22;
-}
-
-// Job scheduling options.
-message JobScheduling {
-  // Optional. Maximum number of times per hour a driver may be restarted as
-  // a result of driver terminating with non-zero code before job is
-  // reported failed.
-  //
-  // A job may be reported as thrashing if driver exits with non-zero code
-  // 4 times within 10 minute window.
-  //
-  // Maximum value is 10.
-  int32 max_failures_per_hour = 1;
-}
-
-// A request to submit a job.
-message SubmitJobRequest {
-  // Required. The ID of the Google Cloud Platform project that the job
-  // belongs to.
-  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The Cloud Dataproc region in which to handle the request.
-  string region = 3 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The job resource.
-  Job job = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // Optional. A unique id used to identify the request. If the server
-  // receives two [SubmitJobRequest][google.cloud.dataproc.v1beta2.SubmitJobRequest] requests with the same
-  // id, then the second request will be ignored and the
-  // first [Job][google.cloud.dataproc.v1beta2.Job] created and stored in the backend
-  // is returned.
-  //
-  // It is recommended to always set this value to a
-  // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
-  //
-  // The id must contain only letters (a-z, A-Z), numbers (0-9),
-  // underscores (_), and hyphens (-). The maximum length is 40 characters.
-  string request_id = 4;
-}
-
-// A request to get the resource representation for a job in a project.
-message GetJobRequest {
-  // Required. The ID of the Google Cloud Platform project that the job
-  // belongs to.
-  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The Cloud Dataproc region in which to handle the request.
-  string region = 3 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The job ID.
-  string job_id = 2 [(google.api.field_behavior) = REQUIRED];
-}
-
-// A request to list jobs in a project.
-message ListJobsRequest {
-  // A matcher that specifies categories of job states.
-  enum JobStateMatcher {
-    // Match all jobs, regardless of state.
-    ALL = 0;
-
-    // Only match jobs in non-terminal states: PENDING, RUNNING, or
-    // CANCEL_PENDING.
-    ACTIVE = 1;
-
-    // Only match jobs in terminal states: CANCELLED, DONE, or ERROR.
-    NON_ACTIVE = 2;
-  }
-
-  // Required. The ID of the Google Cloud Platform project that the job
-  // belongs to.
-  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The Cloud Dataproc region in which to handle the request.
-  string region = 6 [(google.api.field_behavior) = REQUIRED];
-
-  // Optional. The number of results to return in each response.
-  int32 page_size = 2;
-
-  // Optional. The page token, returned by a previous call, to request the
-  // next page of results.
-  string page_token = 3;
-
-  // Optional. If set, the returned jobs list includes only jobs that were
-  // submitted to the named cluster.
-  string cluster_name = 4;
-
-  // Optional. Specifies enumerated categories of jobs to list.
-  // (default = match ALL jobs).
-  //
-  // If `filter` is provided, `jobStateMatcher` will be ignored.
-  JobStateMatcher job_state_matcher = 5;
-
-  // Optional. A filter constraining the jobs to list. Filters are
-  // case-sensitive and have the following syntax:
-  //
-  // [field = value] AND [field [= value]] ...
-  //
-  // where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label
-  // key. **value** can be `*` to match all values.
-  // `status.state` can be either `ACTIVE` or `NON_ACTIVE`.
-  // Only the logical `AND` operator is supported; space-separated items are
-  // treated as having an implicit `AND` operator.
-  //
-  // Example filter:
-  //
-  // status.state = ACTIVE AND labels.env = staging AND labels.starred = *
-  string filter = 7;
-}
-
-// A request to update a job.
-message UpdateJobRequest {
-  // Required. The ID of the Google Cloud Platform project that the job
-  // belongs to.
-  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The Cloud Dataproc region in which to handle the request.
-  string region = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The job ID.
-  string job_id = 3 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The changes to the job.
-  Job job = 4 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. Specifies the path, relative to Job, of
-  // the field to update. For example, to update the labels of a Job the
-  // update_mask parameter would be specified as
-  // labels, and the `PATCH` request body would specify the new
-  // value. Note: Currently, labels is the only
-  // field that can be updated.
-  google.protobuf.FieldMask update_mask = 5 [(google.api.field_behavior) = REQUIRED];
-}
-
-// A list of jobs in a project.
-message ListJobsResponse {
-  // Output only. Jobs list.
-  repeated Job jobs = 1;
-
-  // Optional. This token is included in the response if there are more results
-  // to fetch. To fetch additional results, provide this value as the
-  // `page_token` in a subsequent ListJobsRequest.
-  string next_page_token = 2;
-}
-
-// A request to cancel a job.
-message CancelJobRequest {
-  // Required. The ID of the Google Cloud Platform project that the job
-  // belongs to.
-  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The Cloud Dataproc region in which to handle the request.
-  string region = 3 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The job ID.
-  string job_id = 2 [(google.api.field_behavior) = REQUIRED];
-}
-
-// A request to delete a job.
-message DeleteJobRequest { - // Required. The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Cloud Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The job ID. - string job_id = 2 [(google.api.field_behavior) = REQUIRED]; -} diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py deleted file mode 100644 index c40e358b62cf..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py +++ /dev/null @@ -1,4571 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/dataproc_v1beta2/proto/jobs.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/dataproc_v1beta2/proto/jobs.proto", - package="google.cloud.dataproc.v1beta2", - syntax="proto3", - serialized_options=_b( - "\n!com.google.cloud.dataproc.v1beta2B\tJobsProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" - ), - serialized_pb=_b( - '\n.google/cloud/dataproc_v1beta2/proto/jobs.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xcb\x02\n\rLoggingConfig\x12\\\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32\x41.google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry\x1aj\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xdd\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12L\n\nproperties\x18\x07 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xdb\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12K\n\nproperties\x18\x07 \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xdf\x02\n\nPySparkJob\x12!\n\x14main_python_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x18\n\x10python_file_uris\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12M\n\nproperties\x18\x07 \x03(\x0b\x32\x39.google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"!\n\tQueryList\x12\x14\n\x07queries\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xb0\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12U\n\x10script_variables\x18\x04 \x03(\x0b\x32;.google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry\x12J\n\nproperties\x18\x05 \x03(\x0b\x32\x36.google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xe5\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12Y\n\x10script_variables\x18\x03 \x03(\x0b\x32?.google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry\x12N\n\nproperties\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x38 \x03(\t\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xf3\x03\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12T\n\x10script_variables\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry\x12I\n\nproperties\x18\x05 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x12\x44\n\x0elogging_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xa7\x02\n\tSparkRJob\x12\x1c\n\x0fmain_r_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x11\n\tfile_uris\x18\x03 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x04 \x03(\t\x12L\n\nproperties\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"?\n\x0cJobPlacement\x12\x19\n\x0c\x63luster_name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x14\n\x0c\x63luster_uuid\x18\x02 \x01(\t"\xcc\x03\n\tJobStatus\x12=\n\x05state\x18\x01 \x01(\x0e\x32..google.cloud.dataproc.v1beta2.JobStatus.State\x12\x0f\n\x07\x64\x65tails\x18\x02 \x01(\t\x12\x34\n\x10state_start_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x43\n\x08substate\x18\x07 \x01(\x0e\x32\x31.google.cloud.dataproc.v1beta2.JobStatus.Substate"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"7\n\x0cJobReference\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0e\n\x06job_id\x18\x02 \x01(\t"\xaa\x02\n\x0fYarnApplication\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12H\n\x05state\x18\x02 \x01(\x0e\x32\x34.google.cloud.dataproc.v1beta2.YarnApplication.StateB\x03\xe0\x41\x03\x12\x15\n\x08progress\x18\x03 \x01(\x02\x42\x03\xe0\x41\x03\x12\x19\n\x0ctracking_url\x18\x04 \x01(\tB\x03\xe0\x41\x03"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\xb8\x08\n\x03Job\x12>\n\treference\x18\x01 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobReference\x12\x43\n\tplacement\x18\x02 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobPlacementB\x03\xe0\x41\x02\x12>\n\nhadoop_job\x18\x03 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x06 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x07 \x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12?\n\x0bspark_r_job\x18\x15 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.SparkRJobH\x00\x12\x43\n\rspark_sql_job\x18\x0c \x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12\x38\n\x06status\x18\x08 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatus\x12@\n\x0estatus_history\x18\r \x03(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatus\x12I\n\x11yarn_applications\x18\t \x03(\x0b\x32..google.cloud.dataproc.v1beta2.YarnApplication\x12\x14\n\x0csubmitted_by\x18\n \x01(\t\x12"\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\t\x12 \n\x18\x64river_control_files_uri\x18\x0f \x01(\t\x12>\n\x06labels\x18\x12 
\x03(\x0b\x32..google.cloud.dataproc.v1beta2.Job.LabelsEntry\x12@\n\nscheduling\x18\x14 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobScheduling\x12\x10\n\x08job_uuid\x18\x16 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08type_job".\n\rJobScheduling\x12\x1d\n\x15max_failures_per_hour\x18\x01 \x01(\x05"\x8a\x01\n\x10SubmitJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x34\n\x03job\x18\x02 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.JobB\x03\xe0\x41\x02\x12\x12\n\nrequest_id\x18\x04 \x01(\t"R\n\rGetJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x9f\x02\n\x0fListJobsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x06 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x04 \x01(\t\x12Y\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32>.google.cloud.dataproc.v1beta2.ListJobsRequest.JobStateMatcher\x12\x0e\n\x06\x66ilter\x18\x07 \x01(\t"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xc1\x01\n\x10UpdateJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x34\n\x03job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.JobB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"]\n\x10ListJobsResponse\x12\x30\n\x04jobs\x18\x01 \x03(\x0b\x32".google.cloud.dataproc.v1beta2.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x10\x43\x61ncelJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"U\n\x10\x44\x65leteJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x32\xfb\t\n\rJobController\x12\xc2\x01\n\tSubmitJob\x12/.google.cloud.dataproc.v1beta2.SubmitJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"`\x82\xd3\xe4\x93\x02@";/v1beta2/projects/{project_id}/regions/{region}/jobs:submit:\x01*\xda\x41\x17project_id, region, job\x12\xbe\x01\n\x06GetJob\x12,.google.cloud.dataproc.v1beta2.GetJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"b\x82\xd3\xe4\x93\x02?\x12=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x1aproject_id, region, job_id\x12\xdb\x01\n\x08ListJobs\x12..google.cloud.dataproc.v1beta2.ListJobsRequest\x1a/.google.cloud.dataproc.v1beta2.ListJobsResponse"n\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta2/projects/{project_id}/regions/{region}/jobs\xda\x41\x12project_id, region\xda\x41\x1aproject_id, region, filter\x12\xac\x01\n\tUpdateJob\x12/.google.cloud.dataproc.v1beta2.UpdateJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"J\x82\xd3\xe4\x93\x02\x44\x32=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xce\x01\n\tCancelJob\x12/.google.cloud.dataproc.v1beta2.CancelJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"l\x82\xd3\xe4\x93\x02I"D/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\xda\x41\x1aproject_id, region, 
job_id\x12\xb8\x01\n\tDeleteJob\x12/.google.cloud.dataproc.v1beta2.DeleteJobRequest\x1a\x16.google.protobuf.Empty"b\x82\xd3\xe4\x93\x02?*=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x1aproject_id, region, job_id\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBw\n!com.google.cloud.dataproc.v1beta2B\tJobsProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - -_LOGGINGCONFIG_LEVEL = _descriptor.EnumDescriptor( - name="Level", - full_name="google.cloud.dataproc.v1beta2.LoggingConfig.Level", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="LEVEL_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALL", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="TRACE", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DEBUG", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="INFO", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="WARN", index=5, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ERROR", index=6, number=6, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FATAL", index=7, number=7, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="OFF", index=8, number=8, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=485, - serialized_end=597, -) -_sym_db.RegisterEnumDescriptor(_LOGGINGCONFIG_LEVEL) - -_JOBSTATUS_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.cloud.dataproc.v1beta2.JobStatus.State", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="STATE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="PENDING", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SETUP_DONE", index=2, number=8, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", index=3, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CANCEL_PENDING", index=4, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CANCEL_STARTED", index=5, number=7, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CANCELLED", index=6, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DONE", index=7, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ERROR", index=8, number=6, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ATTEMPT_FAILURE", - index=9, - number=9, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - 
serialized_options=None, - serialized_start=3696, - serialized_end=3865, -) -_sym_db.RegisterEnumDescriptor(_JOBSTATUS_STATE) - -_JOBSTATUS_SUBSTATE = _descriptor.EnumDescriptor( - name="Substate", - full_name="google.cloud.dataproc.v1beta2.JobStatus.Substate", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="UNSPECIFIED", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SUBMITTED", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="QUEUED", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="STALE_STATUS", index=3, number=3, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3867, - serialized_end=3939, -) -_sym_db.RegisterEnumDescriptor(_JOBSTATUS_SUBSTATE) - -_YARNAPPLICATION_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.cloud.dataproc.v1beta2.YarnApplication.State", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="STATE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="NEW", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="NEW_SAVING", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SUBMITTED", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ACCEPTED", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", index=5, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FINISHED", index=6, number=6, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FAILED", index=7, number=7, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="KILLED", index=8, number=8, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4162, - serialized_end=4297, -) -_sym_db.RegisterEnumDescriptor(_YARNAPPLICATION_STATE) - -_LISTJOBSREQUEST_JOBSTATEMATCHER = _descriptor.EnumDescriptor( - name="JobStateMatcher", - full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.JobStateMatcher", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="ALL", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ACTIVE", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="NON_ACTIVE", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=5889, - serialized_end=5943, -) -_sym_db.RegisterEnumDescriptor(_LISTJOBSREQUEST_JOBSTATEMATCHER) - - -_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY = _descriptor.Descriptor( - name="DriverLogLevelsEntry", - full_name="google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry.value", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=377, - serialized_end=483, -) - -_LOGGINGCONFIG = _descriptor.Descriptor( - name="LoggingConfig", - full_name="google.cloud.dataproc.v1beta2.LoggingConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="driver_log_levels", - full_name="google.cloud.dataproc.v1beta2.LoggingConfig.driver_log_levels", - index=0, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY], - enum_types=[_LOGGINGCONFIG_LEVEL], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=266, - serialized_end=597, -) - - -_HADOOPJOB_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - full_name="google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=890, - serialized_end=939, -) - -_HADOOPJOB = _descriptor.Descriptor( - name="HadoopJob", - full_name="google.cloud.dataproc.v1beta2.HadoopJob", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="main_jar_file_uri", - full_name="google.cloud.dataproc.v1beta2.HadoopJob.main_jar_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="main_class", - 
full_name="google.cloud.dataproc.v1beta2.HadoopJob.main_class", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="args", - full_name="google.cloud.dataproc.v1beta2.HadoopJob.args", - index=2, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="jar_file_uris", - full_name="google.cloud.dataproc.v1beta2.HadoopJob.jar_file_uris", - index=3, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="file_uris", - full_name="google.cloud.dataproc.v1beta2.HadoopJob.file_uris", - index=4, - number=5, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="archive_uris", - full_name="google.cloud.dataproc.v1beta2.HadoopJob.archive_uris", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1beta2.HadoopJob.properties", - index=6, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="logging_config", - full_name="google.cloud.dataproc.v1beta2.HadoopJob.logging_config", - index=7, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_HADOOPJOB_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="driver", - full_name="google.cloud.dataproc.v1beta2.HadoopJob.driver", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=600, - serialized_end=949, -) - - -_SPARKJOB_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - full_name="google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=890, - serialized_end=939, -) - -_SPARKJOB = _descriptor.Descriptor( - name="SparkJob", - full_name="google.cloud.dataproc.v1beta2.SparkJob", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="main_jar_file_uri", - full_name="google.cloud.dataproc.v1beta2.SparkJob.main_jar_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="main_class", - full_name="google.cloud.dataproc.v1beta2.SparkJob.main_class", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="args", - full_name="google.cloud.dataproc.v1beta2.SparkJob.args", - index=2, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="jar_file_uris", - full_name="google.cloud.dataproc.v1beta2.SparkJob.jar_file_uris", - index=3, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="file_uris", - full_name="google.cloud.dataproc.v1beta2.SparkJob.file_uris", - index=4, - number=5, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="archive_uris", - full_name="google.cloud.dataproc.v1beta2.SparkJob.archive_uris", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1beta2.SparkJob.properties", - index=6, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="logging_config", - full_name="google.cloud.dataproc.v1beta2.SparkJob.logging_config", - index=7, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_SPARKJOB_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="driver", - full_name="google.cloud.dataproc.v1beta2.SparkJob.driver", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=952, - serialized_end=1299, -) - - -_PYSPARKJOB_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - full_name="google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=890, - serialized_end=939, -) - -_PYSPARKJOB = _descriptor.Descriptor( - name="PySparkJob", - full_name="google.cloud.dataproc.v1beta2.PySparkJob", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="main_python_file_uri", - full_name="google.cloud.dataproc.v1beta2.PySparkJob.main_python_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="args", - full_name="google.cloud.dataproc.v1beta2.PySparkJob.args", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="python_file_uris", - full_name="google.cloud.dataproc.v1beta2.PySparkJob.python_file_uris", - index=2, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="jar_file_uris", - full_name="google.cloud.dataproc.v1beta2.PySparkJob.jar_file_uris", - index=3, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="file_uris", - full_name="google.cloud.dataproc.v1beta2.PySparkJob.file_uris", - index=4, - number=5, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="archive_uris", - full_name="google.cloud.dataproc.v1beta2.PySparkJob.archive_uris", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1beta2.PySparkJob.properties", - index=6, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="logging_config", - full_name="google.cloud.dataproc.v1beta2.PySparkJob.logging_config", - index=7, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_PYSPARKJOB_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1302, - serialized_end=1653, -) - - -_QUERYLIST = _descriptor.Descriptor( - name="QueryList", - full_name="google.cloud.dataproc.v1beta2.QueryList", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="queries", - full_name="google.cloud.dataproc.v1beta2.QueryList.queries", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1655, - serialized_end=1688, -) - - -_HIVEJOB_SCRIPTVARIABLESENTRY = _descriptor.Descriptor( - name="ScriptVariablesEntry", - full_name="google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2007, - serialized_end=2061, -) - -_HIVEJOB_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - full_name="google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=890, - serialized_end=939, -) - -_HIVEJOB = _descriptor.Descriptor( - name="HiveJob", - full_name="google.cloud.dataproc.v1beta2.HiveJob", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="query_file_uri", - full_name="google.cloud.dataproc.v1beta2.HiveJob.query_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="query_list", - full_name="google.cloud.dataproc.v1beta2.HiveJob.query_list", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="continue_on_failure", - full_name="google.cloud.dataproc.v1beta2.HiveJob.continue_on_failure", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="script_variables", - full_name="google.cloud.dataproc.v1beta2.HiveJob.script_variables", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - 
has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1beta2.HiveJob.properties", - index=4, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="jar_file_uris", - full_name="google.cloud.dataproc.v1beta2.HiveJob.jar_file_uris", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_HIVEJOB_SCRIPTVARIABLESENTRY, _HIVEJOB_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="queries", - full_name="google.cloud.dataproc.v1beta2.HiveJob.queries", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=1691, - serialized_end=2123, -) - - -_SPARKSQLJOB_SCRIPTVARIABLESENTRY = _descriptor.Descriptor( - name="ScriptVariablesEntry", - full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2007, - serialized_end=2061, -) - -_SPARKSQLJOB_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=890, - serialized_end=939, -) - -_SPARKSQLJOB = _descriptor.Descriptor( - name="SparkSqlJob", - full_name="google.cloud.dataproc.v1beta2.SparkSqlJob", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="query_file_uri", - full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.query_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="query_list", - full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.query_list", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="script_variables", - full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.script_variables", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.properties", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="jar_file_uris", - full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.jar_file_uris", - index=4, - number=56, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="logging_config", - full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.logging_config", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_SPARKSQLJOB_SCRIPTVARIABLESENTRY, _SPARKSQLJOB_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="queries", - full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.queries", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=2126, - serialized_end=2611, -) - - -_PIGJOB_SCRIPTVARIABLESENTRY = _descriptor.Descriptor( - name="ScriptVariablesEntry", - 
full_name="google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2007, - serialized_end=2061, -) - -_PIGJOB_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - full_name="google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=890, - serialized_end=939, -) - -_PIGJOB = _descriptor.Descriptor( - name="PigJob", - full_name="google.cloud.dataproc.v1beta2.PigJob", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="query_file_uri", - full_name="google.cloud.dataproc.v1beta2.PigJob.query_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="query_list", - full_name="google.cloud.dataproc.v1beta2.PigJob.query_list", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="continue_on_failure", - full_name="google.cloud.dataproc.v1beta2.PigJob.continue_on_failure", - index=2, - number=3, - 
type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="script_variables", - full_name="google.cloud.dataproc.v1beta2.PigJob.script_variables", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1beta2.PigJob.properties", - index=4, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="jar_file_uris", - full_name="google.cloud.dataproc.v1beta2.PigJob.jar_file_uris", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="logging_config", - full_name="google.cloud.dataproc.v1beta2.PigJob.logging_config", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_PIGJOB_SCRIPTVARIABLESENTRY, _PIGJOB_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="queries", - full_name="google.cloud.dataproc.v1beta2.PigJob.queries", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=2614, - serialized_end=3113, -) - - -_SPARKRJOB_PROPERTIESENTRY = _descriptor.Descriptor( - name="PropertiesEntry", - full_name="google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=890, - serialized_end=939, -) - -_SPARKRJOB = _descriptor.Descriptor( - name="SparkRJob", - 
full_name="google.cloud.dataproc.v1beta2.SparkRJob", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="main_r_file_uri", - full_name="google.cloud.dataproc.v1beta2.SparkRJob.main_r_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="args", - full_name="google.cloud.dataproc.v1beta2.SparkRJob.args", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="file_uris", - full_name="google.cloud.dataproc.v1beta2.SparkRJob.file_uris", - index=2, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="archive_uris", - full_name="google.cloud.dataproc.v1beta2.SparkRJob.archive_uris", - index=3, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.cloud.dataproc.v1beta2.SparkRJob.properties", - index=4, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="logging_config", - full_name="google.cloud.dataproc.v1beta2.SparkRJob.logging_config", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_SPARKRJOB_PROPERTIESENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3116, - serialized_end=3411, -) - - -_JOBPLACEMENT = _descriptor.Descriptor( - name="JobPlacement", - full_name="google.cloud.dataproc.v1beta2.JobPlacement", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1beta2.JobPlacement.cluster_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_uuid", - full_name="google.cloud.dataproc.v1beta2.JobPlacement.cluster_uuid", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3413, - serialized_end=3476, -) - - -_JOBSTATUS = _descriptor.Descriptor( - name="JobStatus", - full_name="google.cloud.dataproc.v1beta2.JobStatus", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.dataproc.v1beta2.JobStatus.state", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="details", - full_name="google.cloud.dataproc.v1beta2.JobStatus.details", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="state_start_time", - full_name="google.cloud.dataproc.v1beta2.JobStatus.state_start_time", - index=2, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="substate", - full_name="google.cloud.dataproc.v1beta2.JobStatus.substate", - index=3, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_JOBSTATUS_STATE, _JOBSTATUS_SUBSTATE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3479, - serialized_end=3939, -) - - -_JOBREFERENCE = _descriptor.Descriptor( - name="JobReference", - full_name="google.cloud.dataproc.v1beta2.JobReference", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1beta2.JobReference.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_id", - full_name="google.cloud.dataproc.v1beta2.JobReference.job_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=3941, - serialized_end=3996, -) - - -_YARNAPPLICATION = _descriptor.Descriptor( - name="YarnApplication", - full_name="google.cloud.dataproc.v1beta2.YarnApplication", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.dataproc.v1beta2.YarnApplication.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.dataproc.v1beta2.YarnApplication.state", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="progress", - full_name="google.cloud.dataproc.v1beta2.YarnApplication.progress", - index=2, - number=3, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="tracking_url", - full_name="google.cloud.dataproc.v1beta2.YarnApplication.tracking_url", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_YARNAPPLICATION_STATE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3999, - serialized_end=4297, -) - - -_JOB_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.cloud.dataproc.v1beta2.Job.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.Job.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.Job.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5323, - serialized_end=5368, -) - -_JOB = _descriptor.Descriptor( - name="Job", - full_name="google.cloud.dataproc.v1beta2.Job", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name="reference", - full_name="google.cloud.dataproc.v1beta2.Job.reference", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="placement", - full_name="google.cloud.dataproc.v1beta2.Job.placement", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="hadoop_job", - full_name="google.cloud.dataproc.v1beta2.Job.hadoop_job", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="spark_job", - full_name="google.cloud.dataproc.v1beta2.Job.spark_job", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="pyspark_job", - full_name="google.cloud.dataproc.v1beta2.Job.pyspark_job", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="hive_job", - full_name="google.cloud.dataproc.v1beta2.Job.hive_job", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="pig_job", - full_name="google.cloud.dataproc.v1beta2.Job.pig_job", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="spark_r_job", - full_name="google.cloud.dataproc.v1beta2.Job.spark_r_job", - index=7, - number=21, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="spark_sql_job", - full_name="google.cloud.dataproc.v1beta2.Job.spark_sql_job", - index=8, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status", - full_name="google.cloud.dataproc.v1beta2.Job.status", - index=9, - 
number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status_history", - full_name="google.cloud.dataproc.v1beta2.Job.status_history", - index=10, - number=13, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="yarn_applications", - full_name="google.cloud.dataproc.v1beta2.Job.yarn_applications", - index=11, - number=9, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="submitted_by", - full_name="google.cloud.dataproc.v1beta2.Job.submitted_by", - index=12, - number=10, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="driver_output_resource_uri", - full_name="google.cloud.dataproc.v1beta2.Job.driver_output_resource_uri", - index=13, - number=17, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="driver_control_files_uri", - full_name="google.cloud.dataproc.v1beta2.Job.driver_control_files_uri", - index=14, - number=15, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.cloud.dataproc.v1beta2.Job.labels", - index=15, - number=18, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="scheduling", - full_name="google.cloud.dataproc.v1beta2.Job.scheduling", - index=16, - number=20, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_uuid", - full_name="google.cloud.dataproc.v1beta2.Job.job_uuid", - index=17, - number=22, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_JOB_LABELSENTRY], - enum_types=[], - serialized_options=None, - 
is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type_job", - full_name="google.cloud.dataproc.v1beta2.Job.type_job", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=4300, - serialized_end=5380, -) - - -_JOBSCHEDULING = _descriptor.Descriptor( - name="JobScheduling", - full_name="google.cloud.dataproc.v1beta2.JobScheduling", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="max_failures_per_hour", - full_name="google.cloud.dataproc.v1beta2.JobScheduling.max_failures_per_hour", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5382, - serialized_end=5428, -) - - -_SUBMITJOBREQUEST = _descriptor.Descriptor( - name="SubmitJobRequest", - full_name="google.cloud.dataproc.v1beta2.SubmitJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1beta2.SubmitJobRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1beta2.SubmitJobRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job", - full_name="google.cloud.dataproc.v1beta2.SubmitJobRequest.job", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="request_id", - full_name="google.cloud.dataproc.v1beta2.SubmitJobRequest.request_id", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5431, - serialized_end=5569, -) - - -_GETJOBREQUEST = _descriptor.Descriptor( - name="GetJobRequest", - full_name="google.cloud.dataproc.v1beta2.GetJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1beta2.GetJobRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1beta2.GetJobRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_id", - full_name="google.cloud.dataproc.v1beta2.GetJobRequest.job_id", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5571, - serialized_end=5653, -) - - -_LISTJOBSREQUEST = _descriptor.Descriptor( - name="ListJobsRequest", - full_name="google.cloud.dataproc.v1beta2.ListJobsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.region", - index=1, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.page_size", - index=2, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.page_token", - index=3, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.cluster_name", - index=4, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - 
name="job_state_matcher", - full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.job_state_matcher", - index=5, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.filter", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_LISTJOBSREQUEST_JOBSTATEMATCHER], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5656, - serialized_end=5943, -) - - -_UPDATEJOBREQUEST = _descriptor.Descriptor( - name="UpdateJobRequest", - full_name="google.cloud.dataproc.v1beta2.UpdateJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1beta2.UpdateJobRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1beta2.UpdateJobRequest.region", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_id", - full_name="google.cloud.dataproc.v1beta2.UpdateJobRequest.job_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job", - full_name="google.cloud.dataproc.v1beta2.UpdateJobRequest.job", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.dataproc.v1beta2.UpdateJobRequest.update_mask", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5946, - serialized_end=6139, -) - - -_LISTJOBSRESPONSE = _descriptor.Descriptor( - 
name="ListJobsResponse", - full_name="google.cloud.dataproc.v1beta2.ListJobsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="jobs", - full_name="google.cloud.dataproc.v1beta2.ListJobsResponse.jobs", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.dataproc.v1beta2.ListJobsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6141, - serialized_end=6234, -) - - -_CANCELJOBREQUEST = _descriptor.Descriptor( - name="CancelJobRequest", - full_name="google.cloud.dataproc.v1beta2.CancelJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1beta2.CancelJobRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - full_name="google.cloud.dataproc.v1beta2.CancelJobRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_id", - full_name="google.cloud.dataproc.v1beta2.CancelJobRequest.job_id", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6236, - serialized_end=6321, -) - - -_DELETEJOBREQUEST = _descriptor.Descriptor( - name="DeleteJobRequest", - full_name="google.cloud.dataproc.v1beta2.DeleteJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.dataproc.v1beta2.DeleteJobRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region", - 
full_name="google.cloud.dataproc.v1beta2.DeleteJobRequest.region", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_id", - full_name="google.cloud.dataproc.v1beta2.DeleteJobRequest.job_id", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6323, - serialized_end=6408, -) - -_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY.fields_by_name[ - "value" -].enum_type = _LOGGINGCONFIG_LEVEL -_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY.containing_type = _LOGGINGCONFIG -_LOGGINGCONFIG.fields_by_name[ - "driver_log_levels" -].message_type = _LOGGINGCONFIG_DRIVERLOGLEVELSENTRY -_LOGGINGCONFIG_LEVEL.containing_type = _LOGGINGCONFIG -_HADOOPJOB_PROPERTIESENTRY.containing_type = _HADOOPJOB -_HADOOPJOB.fields_by_name["properties"].message_type = _HADOOPJOB_PROPERTIESENTRY -_HADOOPJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG -_HADOOPJOB.oneofs_by_name["driver"].fields.append( - _HADOOPJOB.fields_by_name["main_jar_file_uri"] -) -_HADOOPJOB.fields_by_name[ - "main_jar_file_uri" -].containing_oneof = _HADOOPJOB.oneofs_by_name["driver"] -_HADOOPJOB.oneofs_by_name["driver"].fields.append( - _HADOOPJOB.fields_by_name["main_class"] -) -_HADOOPJOB.fields_by_name["main_class"].containing_oneof = _HADOOPJOB.oneofs_by_name[ - "driver" -] -_SPARKJOB_PROPERTIESENTRY.containing_type = _SPARKJOB -_SPARKJOB.fields_by_name["properties"].message_type = _SPARKJOB_PROPERTIESENTRY -_SPARKJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG -_SPARKJOB.oneofs_by_name["driver"].fields.append( - _SPARKJOB.fields_by_name["main_jar_file_uri"] -) -_SPARKJOB.fields_by_name[ - "main_jar_file_uri" -].containing_oneof = _SPARKJOB.oneofs_by_name["driver"] -_SPARKJOB.oneofs_by_name["driver"].fields.append(_SPARKJOB.fields_by_name["main_class"]) -_SPARKJOB.fields_by_name["main_class"].containing_oneof = _SPARKJOB.oneofs_by_name[ - "driver" -] -_PYSPARKJOB_PROPERTIESENTRY.containing_type = _PYSPARKJOB -_PYSPARKJOB.fields_by_name["properties"].message_type = _PYSPARKJOB_PROPERTIESENTRY -_PYSPARKJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG -_HIVEJOB_SCRIPTVARIABLESENTRY.containing_type = _HIVEJOB -_HIVEJOB_PROPERTIESENTRY.containing_type = _HIVEJOB -_HIVEJOB.fields_by_name["query_list"].message_type = _QUERYLIST -_HIVEJOB.fields_by_name["script_variables"].message_type = _HIVEJOB_SCRIPTVARIABLESENTRY -_HIVEJOB.fields_by_name["properties"].message_type = _HIVEJOB_PROPERTIESENTRY -_HIVEJOB.oneofs_by_name["queries"].fields.append( - _HIVEJOB.fields_by_name["query_file_uri"] -) -_HIVEJOB.fields_by_name["query_file_uri"].containing_oneof = _HIVEJOB.oneofs_by_name[ - "queries" -] -_HIVEJOB.oneofs_by_name["queries"].fields.append(_HIVEJOB.fields_by_name["query_list"]) -_HIVEJOB.fields_by_name["query_list"].containing_oneof = _HIVEJOB.oneofs_by_name[ - "queries" -] 
-_SPARKSQLJOB_SCRIPTVARIABLESENTRY.containing_type = _SPARKSQLJOB -_SPARKSQLJOB_PROPERTIESENTRY.containing_type = _SPARKSQLJOB -_SPARKSQLJOB.fields_by_name["query_list"].message_type = _QUERYLIST -_SPARKSQLJOB.fields_by_name[ - "script_variables" -].message_type = _SPARKSQLJOB_SCRIPTVARIABLESENTRY -_SPARKSQLJOB.fields_by_name["properties"].message_type = _SPARKSQLJOB_PROPERTIESENTRY -_SPARKSQLJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG -_SPARKSQLJOB.oneofs_by_name["queries"].fields.append( - _SPARKSQLJOB.fields_by_name["query_file_uri"] -) -_SPARKSQLJOB.fields_by_name[ - "query_file_uri" -].containing_oneof = _SPARKSQLJOB.oneofs_by_name["queries"] -_SPARKSQLJOB.oneofs_by_name["queries"].fields.append( - _SPARKSQLJOB.fields_by_name["query_list"] -) -_SPARKSQLJOB.fields_by_name[ - "query_list" -].containing_oneof = _SPARKSQLJOB.oneofs_by_name["queries"] -_PIGJOB_SCRIPTVARIABLESENTRY.containing_type = _PIGJOB -_PIGJOB_PROPERTIESENTRY.containing_type = _PIGJOB -_PIGJOB.fields_by_name["query_list"].message_type = _QUERYLIST -_PIGJOB.fields_by_name["script_variables"].message_type = _PIGJOB_SCRIPTVARIABLESENTRY -_PIGJOB.fields_by_name["properties"].message_type = _PIGJOB_PROPERTIESENTRY -_PIGJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG -_PIGJOB.oneofs_by_name["queries"].fields.append( - _PIGJOB.fields_by_name["query_file_uri"] -) -_PIGJOB.fields_by_name["query_file_uri"].containing_oneof = _PIGJOB.oneofs_by_name[ - "queries" -] -_PIGJOB.oneofs_by_name["queries"].fields.append(_PIGJOB.fields_by_name["query_list"]) -_PIGJOB.fields_by_name["query_list"].containing_oneof = _PIGJOB.oneofs_by_name[ - "queries" -] -_SPARKRJOB_PROPERTIESENTRY.containing_type = _SPARKRJOB -_SPARKRJOB.fields_by_name["properties"].message_type = _SPARKRJOB_PROPERTIESENTRY -_SPARKRJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG -_JOBSTATUS.fields_by_name["state"].enum_type = _JOBSTATUS_STATE -_JOBSTATUS.fields_by_name[ - "state_start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_JOBSTATUS.fields_by_name["substate"].enum_type = _JOBSTATUS_SUBSTATE -_JOBSTATUS_STATE.containing_type = _JOBSTATUS -_JOBSTATUS_SUBSTATE.containing_type = _JOBSTATUS -_YARNAPPLICATION.fields_by_name["state"].enum_type = _YARNAPPLICATION_STATE -_YARNAPPLICATION_STATE.containing_type = _YARNAPPLICATION -_JOB_LABELSENTRY.containing_type = _JOB -_JOB.fields_by_name["reference"].message_type = _JOBREFERENCE -_JOB.fields_by_name["placement"].message_type = _JOBPLACEMENT -_JOB.fields_by_name["hadoop_job"].message_type = _HADOOPJOB -_JOB.fields_by_name["spark_job"].message_type = _SPARKJOB -_JOB.fields_by_name["pyspark_job"].message_type = _PYSPARKJOB -_JOB.fields_by_name["hive_job"].message_type = _HIVEJOB -_JOB.fields_by_name["pig_job"].message_type = _PIGJOB -_JOB.fields_by_name["spark_r_job"].message_type = _SPARKRJOB -_JOB.fields_by_name["spark_sql_job"].message_type = _SPARKSQLJOB -_JOB.fields_by_name["status"].message_type = _JOBSTATUS -_JOB.fields_by_name["status_history"].message_type = _JOBSTATUS -_JOB.fields_by_name["yarn_applications"].message_type = _YARNAPPLICATION -_JOB.fields_by_name["labels"].message_type = _JOB_LABELSENTRY -_JOB.fields_by_name["scheduling"].message_type = _JOBSCHEDULING -_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["hadoop_job"]) -_JOB.fields_by_name["hadoop_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] -_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["spark_job"]) 
-_JOB.fields_by_name["spark_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] -_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["pyspark_job"]) -_JOB.fields_by_name["pyspark_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] -_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["hive_job"]) -_JOB.fields_by_name["hive_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] -_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["pig_job"]) -_JOB.fields_by_name["pig_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] -_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["spark_r_job"]) -_JOB.fields_by_name["spark_r_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] -_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["spark_sql_job"]) -_JOB.fields_by_name["spark_sql_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] -_SUBMITJOBREQUEST.fields_by_name["job"].message_type = _JOB -_LISTJOBSREQUEST.fields_by_name[ - "job_state_matcher" -].enum_type = _LISTJOBSREQUEST_JOBSTATEMATCHER -_LISTJOBSREQUEST_JOBSTATEMATCHER.containing_type = _LISTJOBSREQUEST -_UPDATEJOBREQUEST.fields_by_name["job"].message_type = _JOB -_UPDATEJOBREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTJOBSRESPONSE.fields_by_name["jobs"].message_type = _JOB -DESCRIPTOR.message_types_by_name["LoggingConfig"] = _LOGGINGCONFIG -DESCRIPTOR.message_types_by_name["HadoopJob"] = _HADOOPJOB -DESCRIPTOR.message_types_by_name["SparkJob"] = _SPARKJOB -DESCRIPTOR.message_types_by_name["PySparkJob"] = _PYSPARKJOB -DESCRIPTOR.message_types_by_name["QueryList"] = _QUERYLIST -DESCRIPTOR.message_types_by_name["HiveJob"] = _HIVEJOB -DESCRIPTOR.message_types_by_name["SparkSqlJob"] = _SPARKSQLJOB -DESCRIPTOR.message_types_by_name["PigJob"] = _PIGJOB -DESCRIPTOR.message_types_by_name["SparkRJob"] = _SPARKRJOB -DESCRIPTOR.message_types_by_name["JobPlacement"] = _JOBPLACEMENT -DESCRIPTOR.message_types_by_name["JobStatus"] = _JOBSTATUS -DESCRIPTOR.message_types_by_name["JobReference"] = _JOBREFERENCE -DESCRIPTOR.message_types_by_name["YarnApplication"] = _YARNAPPLICATION -DESCRIPTOR.message_types_by_name["Job"] = _JOB -DESCRIPTOR.message_types_by_name["JobScheduling"] = _JOBSCHEDULING -DESCRIPTOR.message_types_by_name["SubmitJobRequest"] = _SUBMITJOBREQUEST -DESCRIPTOR.message_types_by_name["GetJobRequest"] = _GETJOBREQUEST -DESCRIPTOR.message_types_by_name["ListJobsRequest"] = _LISTJOBSREQUEST -DESCRIPTOR.message_types_by_name["UpdateJobRequest"] = _UPDATEJOBREQUEST -DESCRIPTOR.message_types_by_name["ListJobsResponse"] = _LISTJOBSRESPONSE -DESCRIPTOR.message_types_by_name["CancelJobRequest"] = _CANCELJOBREQUEST -DESCRIPTOR.message_types_by_name["DeleteJobRequest"] = _DELETEJOBREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LoggingConfig = _reflection.GeneratedProtocolMessageType( - "LoggingConfig", - (_message.Message,), - dict( - DriverLogLevelsEntry=_reflection.GeneratedProtocolMessageType( - "DriverLogLevelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry) - ), - ), - DESCRIPTOR=_LOGGINGCONFIG, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""The runtime logging config of the job. 
- - - Attributes: - driver_log_levels: - The per-package log levels for the driver. This may include - "root" package name to configure rootLogger. Examples: - 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.LoggingConfig) - ), -) -_sym_db.RegisterMessage(LoggingConfig) -_sym_db.RegisterMessage(LoggingConfig.DriverLogLevelsEntry) - -HadoopJob = _reflection.GeneratedProtocolMessageType( - "HadoopJob", - (_message.Message,), - dict( - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_HADOOPJOB_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry) - ), - ), - DESCRIPTOR=_HADOOPJOB, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache Hadoop - MapReduce `__ - jobs on `Apache Hadoop - YARN `__. - - - Attributes: - driver: - Required. Indicates the location of the driver's main class. - Specify either the jar file that contains the main class or - the main class name. To specify both, add the jar file to - ``jar_file_uris``, and then specify the main class name in - this property. - main_jar_file_uri: - The HCFS URI of the jar file containing the main class. - Examples: 'gs://foo-bucket/analytics-binaries/extract-useful- - metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' - 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce- - examples.jar' - main_class: - The name of the driver's main class. The jar file containing - the class must be in the default CLASSPATH or specified in - ``jar_file_uris``. - args: - Optional. The arguments to pass to the driver. Do not include - arguments, such as ``-libjars`` or ``-Dfoo=bar``, that can be - set as job properties, since a collision may occur that causes - an incorrect job submission. - jar_file_uris: - Optional. Jar file URIs to add to the CLASSPATHs of the Hadoop - driver and tasks. - file_uris: - Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to - be copied to the working directory of Hadoop drivers and - distributed tasks. Useful for naively parallel tasks. - archive_uris: - Optional. HCFS URIs of archives to be extracted in the working - directory of Hadoop drivers and tasks. Supported file types: - .jar, .tar, .tar.gz, .tgz, or .zip. - properties: - Optional. A mapping of property names to values, used to - configure Hadoop. Properties that conflict with values set by - the Cloud Dataproc API may be overwritten. Can include - properties set in /etc/hadoop/conf/\*-site and classes in user - code. - logging_config: - Optional. The runtime log config for job execution. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.HadoopJob) - ), -) -_sym_db.RegisterMessage(HadoopJob) -_sym_db.RegisterMessage(HadoopJob.PropertiesEntry) - -SparkJob = _reflection.GeneratedProtocolMessageType( - "SparkJob", - (_message.Message,), - dict( - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_SPARKJOB_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry) - ), - ), - DESCRIPTOR=_SPARKJOB, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache - Spark `__ applications on YARN. The - specification of the main method to call to drive the job. Specify - either the jar file that contains the main class or the main class name. - To pass both a main jar and a main class in that jar, add the jar to - ``CommonJob.jar_file_uris``, and then specify the main class name in - ``main_class``. - - - Attributes: - main_jar_file_uri: - The HCFS URI of the jar file that contains the main class. - main_class: - The name of the driver's main class. The jar file that - contains the class must be in the default CLASSPATH or - specified in ``jar_file_uris``. - args: - Optional. The arguments to pass to the driver. Do not include - arguments, such as ``--conf``, that can be set as job - properties, since a collision may occur that causes an - incorrect job submission. - jar_file_uris: - Optional. HCFS URIs of jar files to add to the CLASSPATHs of - the Spark driver and tasks. - file_uris: - Optional. HCFS URIs of files to be copied to the working - directory of Spark drivers and distributed tasks. Useful for - naively parallel tasks. - archive_uris: - Optional. HCFS URIs of archives to be extracted in the working - directory of Spark drivers and tasks. Supported file types: - .jar, .tar, .tar.gz, .tgz, and .zip. - properties: - Optional. A mapping of property names to values, used to - configure Spark. Properties that conflict with values set by - the Cloud Dataproc API may be overwritten. Can include - properties set in /etc/spark/conf/spark-defaults.conf and - classes in user code. - logging_config: - Optional. The runtime log config for job execution. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SparkJob) - ), -) -_sym_db.RegisterMessage(SparkJob) -_sym_db.RegisterMessage(SparkJob.PropertiesEntry) - -PySparkJob = _reflection.GeneratedProtocolMessageType( - "PySparkJob", - (_message.Message,), - dict( - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_PYSPARKJOB_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry) - ), - ), - DESCRIPTOR=_PYSPARKJOB, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache - PySpark `__ - applications on YARN. - - - Attributes: - main_python_file_uri: - Required. The HCFS URI of the main Python file to use as the - driver. Must be a .py file. - args: - Optional. The arguments to pass to the driver. Do not include - arguments, such as ``--conf``, that can be set as job - properties, since a collision may occur that causes an - incorrect job submission. - python_file_uris: - Optional. 
HCFS file URIs of Python files to pass to the - PySpark framework. Supported file types: .py, .egg, and .zip. - jar_file_uris: - Optional. HCFS URIs of jar files to add to the CLASSPATHs of - the Python driver and tasks. - file_uris: - Optional. HCFS URIs of files to be copied to the working - directory of Python drivers and distributed tasks. Useful for - naively parallel tasks. - archive_uris: - Optional. HCFS URIs of archives to be extracted in the working - directory of .jar, .tar, .tar.gz, .tgz, and .zip. - properties: - Optional. A mapping of property names to values, used to - configure PySpark. Properties that conflict with values set by - the Cloud Dataproc API may be overwritten. Can include - properties set in /etc/spark/conf/spark-defaults.conf and - classes in user code. - logging_config: - Optional. The runtime log config for job execution. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.PySparkJob) - ), -) -_sym_db.RegisterMessage(PySparkJob) -_sym_db.RegisterMessage(PySparkJob.PropertiesEntry) - -QueryList = _reflection.GeneratedProtocolMessageType( - "QueryList", - (_message.Message,), - dict( - DESCRIPTOR=_QUERYLIST, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A list of queries to run on a cluster. - - - Attributes: - queries: - Required. The queries to execute. You do not need to terminate - a query with a semicolon. Multiple queries can be specified in - one string by separating each with a semicolon. Here is an - example of an Cloud Dataproc API snippet that uses a QueryList - to specify a HiveJob: :: "hiveJob": { "queryList": - { "queries": [ "query1", "query2", - "query3;query4", ] } } - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.QueryList) - ), -) -_sym_db.RegisterMessage(QueryList) - -HiveJob = _reflection.GeneratedProtocolMessageType( - "HiveJob", - (_message.Message,), - dict( - ScriptVariablesEntry=_reflection.GeneratedProtocolMessageType( - "ScriptVariablesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_HIVEJOB_SCRIPTVARIABLESENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry) - ), - ), - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_HIVEJOB_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry) - ), - ), - DESCRIPTOR=_HIVEJOB, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache - Hive `__ queries on YARN. - - - Attributes: - queries: - Required. The sequence of Hive queries to execute, specified - as either an HCFS file URI or a list of queries. - query_file_uri: - The HCFS URI of the script that contains Hive queries. - query_list: - A list of queries. - continue_on_failure: - Optional. Whether to continue executing queries if a query - fails. The default value is ``false``. Setting to ``true`` can - be useful when executing independent parallel queries. - script_variables: - Optional. Mapping of query variable names to values - (equivalent to the Hive command: ``SET name="value";``). - properties: - Optional. A mapping of property names and values, used to - configure Hive. Properties that conflict with values set by - the Cloud Dataproc API may be overwritten. 
Can include - properties set in /etc/hadoop/conf/\*-site.xml, - /etc/hive/conf/hive-site.xml, and classes in user code. - jar_file_uris: - Optional. HCFS URIs of jar files to add to the CLASSPATH of - the Hive server and Hadoop MapReduce (MR) tasks. Can contain - Hive SerDes and UDFs. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.HiveJob) - ), -) -_sym_db.RegisterMessage(HiveJob) -_sym_db.RegisterMessage(HiveJob.ScriptVariablesEntry) -_sym_db.RegisterMessage(HiveJob.PropertiesEntry) - -SparkSqlJob = _reflection.GeneratedProtocolMessageType( - "SparkSqlJob", - (_message.Message,), - dict( - ScriptVariablesEntry=_reflection.GeneratedProtocolMessageType( - "ScriptVariablesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_SPARKSQLJOB_SCRIPTVARIABLESENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry) - ), - ), - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_SPARKSQLJOB_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry) - ), - ), - DESCRIPTOR=_SPARKSQLJOB, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache Spark - SQL `__ queries. - - - Attributes: - queries: - Required. The sequence of Spark SQL queries to execute, - specified as either an HCFS file URI or as a list of queries. - query_file_uri: - The HCFS URI of the script that contains SQL queries. - query_list: - A list of queries. - script_variables: - Optional. Mapping of query variable names to values - (equivalent to the Spark SQL command: SET ``name="value";``). - properties: - Optional. A mapping of property names to values, used to - configure Spark SQL's SparkConf. Properties that conflict with - values set by the Cloud Dataproc API may be overwritten. - jar_file_uris: - Optional. HCFS URIs of jar files to be added to the Spark - CLASSPATH. - logging_config: - Optional. The runtime log config for job execution. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SparkSqlJob) - ), -) -_sym_db.RegisterMessage(SparkSqlJob) -_sym_db.RegisterMessage(SparkSqlJob.ScriptVariablesEntry) -_sym_db.RegisterMessage(SparkSqlJob.PropertiesEntry) - -PigJob = _reflection.GeneratedProtocolMessageType( - "PigJob", - (_message.Message,), - dict( - ScriptVariablesEntry=_reflection.GeneratedProtocolMessageType( - "ScriptVariablesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_PIGJOB_SCRIPTVARIABLESENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry) - ), - ), - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_PIGJOB_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry) - ), - ), - DESCRIPTOR=_PIGJOB, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache - Pig `__ queries on YARN. - - - Attributes: - queries: - Required. The sequence of Pig queries to execute, specified as - an HCFS file URI or a list of queries. 
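A short sketch, using the same generated module, of the inline QueryList form described in the QueryList and HiveJob docstrings above; the queries and variable values are illustrative only:

    from google.cloud.dataproc_v1beta2.proto import jobs_pb2

    # query_list and query_file_uri are alternative ways to supply queries;
    # statements do not need terminating semicolons.
    hive_job = jobs_pb2.HiveJob(
        query_list=jobs_pb2.QueryList(
            queries=["SHOW TABLES", "SELECT COUNT(*) FROM logs"]
        ),
        script_variables={"env": "staging"},
        continue_on_failure=False,
    )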
- query_file_uri: - The HCFS URI of the script that contains the Pig queries. - query_list: - A list of queries. - continue_on_failure: - Optional. Whether to continue executing queries if a query - fails. The default value is ``false``. Setting to ``true`` can - be useful when executing independent parallel queries. - script_variables: - Optional. Mapping of query variable names to values - (equivalent to the Pig command: ``name=[value]``). - properties: - Optional. A mapping of property names to values, used to - configure Pig. Properties that conflict with values set by the - Cloud Dataproc API may be overwritten. Can include properties - set in /etc/hadoop/conf/\*-site.xml, - /etc/pig/conf/pig.properties, and classes in user code. - jar_file_uris: - Optional. HCFS URIs of jar files to add to the CLASSPATH of - the Pig Client and Hadoop MapReduce (MR) tasks. Can contain - Pig UDFs. - logging_config: - Optional. The runtime log config for job execution. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.PigJob) - ), -) -_sym_db.RegisterMessage(PigJob) -_sym_db.RegisterMessage(PigJob.ScriptVariablesEntry) -_sym_db.RegisterMessage(PigJob.PropertiesEntry) - -SparkRJob = _reflection.GeneratedProtocolMessageType( - "SparkRJob", - (_message.Message,), - dict( - PropertiesEntry=_reflection.GeneratedProtocolMessageType( - "PropertiesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_SPARKRJOB_PROPERTIESENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry) - ), - ), - DESCRIPTOR=_SPARKRJOB, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache - SparkR `__ - applications on YARN. - - - Attributes: - main_r_file_uri: - Required. The HCFS URI of the main R file to use as the - driver. Must be a .R file. - args: - Optional. The arguments to pass to the driver. Do not include - arguments, such as ``--conf``, that can be set as job - properties, since a collision may occur that causes an - incorrect job submission. - file_uris: - Optional. HCFS URIs of files to be copied to the working - directory of R drivers and distributed tasks. Useful for - naively parallel tasks. - archive_uris: - Optional. HCFS URIs of archives to be extracted in the working - directory of Spark drivers and tasks. Supported file types: - .jar, .tar, .tar.gz, .tgz, and .zip. - properties: - Optional. A mapping of property names to values, used to - configure SparkR. Properties that conflict with values set by - the Cloud Dataproc API may be overwritten. Can include - properties set in /etc/spark/conf/spark-defaults.conf and - classes in user code. - logging_config: - Optional. The runtime log config for job execution. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SparkRJob) - ), -) -_sym_db.RegisterMessage(SparkRJob) -_sym_db.RegisterMessage(SparkRJob.PropertiesEntry) - -JobPlacement = _reflection.GeneratedProtocolMessageType( - "JobPlacement", - (_message.Message,), - dict( - DESCRIPTOR=_JOBPLACEMENT, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""Cloud Dataproc job config. - - - Attributes: - cluster_name: - Required. The name of the cluster where the job will be - submitted. - cluster_uuid: - Output only. A cluster UUID generated by the Cloud Dataproc - service when the job is submitted. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.JobPlacement) - ), -) -_sym_db.RegisterMessage(JobPlacement) - -JobStatus = _reflection.GeneratedProtocolMessageType( - "JobStatus", - (_message.Message,), - dict( - DESCRIPTOR=_JOBSTATUS, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""Cloud Dataproc job status. - - - Attributes: - state: - Output only. A state message specifying the overall job state. - details: - Output only. Optional job state details, such as an error - description if the state is ERROR. - state_start_time: - Output only. The time when this state was entered. - substate: - Output only. Additional state information, which includes - status reported by the agent. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.JobStatus) - ), -) -_sym_db.RegisterMessage(JobStatus) - -JobReference = _reflection.GeneratedProtocolMessageType( - "JobReference", - (_message.Message,), - dict( - DESCRIPTOR=_JOBREFERENCE, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""Encapsulates the full scoping used to reference a job. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - job belongs to. - job_id: - Optional. The job ID, which must be unique within the project. - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (\_), or hyphens (-). The maximum length is 100 - characters. If not specified by the caller, the job ID will - be provided by the server. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.JobReference) - ), -) -_sym_db.RegisterMessage(JobReference) - -YarnApplication = _reflection.GeneratedProtocolMessageType( - "YarnApplication", - (_message.Message,), - dict( - DESCRIPTOR=_YARNAPPLICATION, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A YARN application created by a job. Application - information is a subset of - org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto. - - **Beta Feature**: This report is available for testing purposes only. It - may be changed before final release. - - - Attributes: - name: - Output only. The application name. - state: - Output only. The application state. - progress: - Output only. The numerical progress of the application, from 1 - to 100. - tracking_url: - Optional. Output only. The HTTP URL of the ApplicationMaster, - HistoryServer, or TimelineServer that provides application- - specific information. The URL uses the internal hostname, and - requires a proxy server for resolution and, possibly, access. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.YarnApplication) - ), -) -_sym_db.RegisterMessage(YarnApplication) - -Job = _reflection.GeneratedProtocolMessageType( - "Job", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_JOB_LABELSENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.Job.LabelsEntry) - ), - ), - DESCRIPTOR=_JOB, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job resource. - - - Attributes: - reference: - Optional. The fully qualified reference to the job, which can - be used to obtain the equivalent REST path of the job - resource. If this property is not specified when a job is - created, the server generates a job\_id. 
- placement: - Required. Job information, including how, when, and where to - run the job. - type_job: - Required. The application/framework-specific portion of the - job. - hadoop_job: - Job is a Hadoop job. - spark_job: - Job is a Spark job. - pyspark_job: - Job is a Pyspark job. - hive_job: - Job is a Hive job. - pig_job: - Job is a Pig job. - spark_r_job: - Job is a SparkR job. - spark_sql_job: - Job is a SparkSql job. - status: - Output only. The job status. Additional application-specific - status information may be contained in the type\_job and - yarn\_applications fields. - status_history: - Output only. The previous job status. - yarn_applications: - Output only. The collection of YARN applications spun up by - this job. **Beta** Feature: This report is available for - testing purposes only. It may be changed before final release. - submitted_by: - Output only. The email address of the user submitting the job. - For jobs submitted on the cluster, the address is - username@hostname. - driver_output_resource_uri: - Output only. A URI pointing to the location of the stdout of - the job's driver program. - driver_control_files_uri: - Output only. If present, the location of miscellaneous control - files which may be used as part of job setup and handling. If - not present, control files may be placed in the same location - as ``driver_output_uri``. - labels: - Optional. The labels to associate with this job. Label - **keys** must contain 1 to 63 characters, and must conform to - `RFC 1035 `__. Label - **values** may be empty, but, if present, must contain 1 to 63 - characters, and must conform to `RFC 1035 - `__. No more than 32 - labels can be associated with a job. - scheduling: - Optional. Job scheduling configuration. - job_uuid: - Output only. A UUID that uniquely identifies a job within the - project over time. This is in contrast to a user-settable - reference.job\_id that may be reused over time. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.Job) - ), -) -_sym_db.RegisterMessage(Job) -_sym_db.RegisterMessage(Job.LabelsEntry) - -JobScheduling = _reflection.GeneratedProtocolMessageType( - "JobScheduling", - (_message.Message,), - dict( - DESCRIPTOR=_JOBSCHEDULING, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""Job scheduling options. - - - Attributes: - max_failures_per_hour: - Optional. Maximum number of times per hour a driver may be - restarted as a result of driver terminating with non-zero code - before job is reported failed. A job may be reported as - thrashing if driver exits with non-zero code 4 times within 10 - minute window. Maximum value is 10. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.JobScheduling) - ), -) -_sym_db.RegisterMessage(JobScheduling) - -SubmitJobRequest = _reflection.GeneratedProtocolMessageType( - "SubmitJobRequest", - (_message.Message,), - dict( - DESCRIPTOR=_SUBMITJOBREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A request to submit a job. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - job belongs to. - region: - Required. The Cloud Dataproc region in which to handle the - request. - job: - Required. The job resource. - request_id: - Optional. A unique id used to identify the request. 
If the - server receives two [SubmitJobRequest][google.cloud.dataproc.v - 1beta2.SubmitJobRequest] requests with the same id, then the - second request will be ignored and the first - [Job][google.cloud.dataproc.v1beta2.Job] created and stored in - the backend is returned. It is recommended to always set this - value to a `UUID `__. The id must contain only letters (a-z, - A-Z), numbers (0-9), underscores (\_), and hyphens (-). The - maximum length is 40 characters. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SubmitJobRequest) - ), -) -_sym_db.RegisterMessage(SubmitJobRequest) - -GetJobRequest = _reflection.GeneratedProtocolMessageType( - "GetJobRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETJOBREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A request to get the resource representation for a job in - a project. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - job belongs to. - region: - Required. The Cloud Dataproc region in which to handle the - request. - job_id: - Required. The job ID. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.GetJobRequest) - ), -) -_sym_db.RegisterMessage(GetJobRequest) - -ListJobsRequest = _reflection.GeneratedProtocolMessageType( - "ListJobsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTJOBSREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A request to list jobs in a project. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - job belongs to. - region: - Required. The Cloud Dataproc region in which to handle the - request. - page_size: - Optional. The number of results to return in each response. - page_token: - Optional. The page token, returned by a previous call, to - request the next page of results. - cluster_name: - Optional. If set, the returned jobs list includes only jobs - that were submitted to the named cluster. - job_state_matcher: - Optional. Specifies enumerated categories of jobs to list. - (default = match ALL jobs). If ``filter`` is provided, - ``jobStateMatcher`` will be ignored. - filter: - Optional. A filter constraining the jobs to list. Filters are - case-sensitive and have the following syntax: [field = value] - AND [field [= value]] ... where **field** is ``status.state`` - or ``labels.[KEY]``, and ``[KEY]`` is a label key. **value** - can be ``*`` to match all values. ``status.state`` can be - either ``ACTIVE`` or ``NON_ACTIVE``. Only the logical ``AND`` - operator is supported; space-separated items are treated as - having an implicit ``AND`` operator. Example filter: - status.state = ACTIVE AND labels.env = staging AND - labels.starred = \* - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ListJobsRequest) - ), -) -_sym_db.RegisterMessage(ListJobsRequest) - -UpdateJobRequest = _reflection.GeneratedProtocolMessageType( - "UpdateJobRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATEJOBREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A request to update a job. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - job belongs to. - region: - Required. The Cloud Dataproc region in which to handle the - request. - job_id: - Required. The job ID. - job: - Required. The changes to the job. - update_mask: - Required. Specifies the path, relative to Job, of the field to - update. 
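Building on the SubmitJobRequest and ListJobsRequest docstrings above, a hedged sketch of an idempotent submit (UUID request_id) and a filtered list, again with placeholder project, cluster, and bucket names:

    import uuid

    from google.cloud.dataproc_v1beta2.proto import jobs_pb2

    submit_req = jobs_pb2.SubmitJobRequest(
        project_id="example-project",
        region="us-central1",
        job=jobs_pb2.Job(
            placement=jobs_pb2.JobPlacement(cluster_name="example-cluster"),
            pyspark_job=jobs_pb2.PySparkJob(
                main_python_file_uri="gs://example-bucket/job.py"
            ),
        ),
        # Resending with the same request_id lets the backend ignore the
        # duplicate and return the Job created by the first request.
        request_id=str(uuid.uuid4()),
    )

    list_req = jobs_pb2.ListJobsRequest(
        project_id="example-project",
        region="us-central1",
        # Filter syntax as documented above; jobStateMatcher is ignored
        # whenever a filter is supplied.
        filter="status.state = ACTIVE AND labels.env = staging",
    )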
For example, to update the labels of a Job the - update\_mask parameter would be specified as labels, and the - ``PATCH`` request body would specify the new value. Note: - Currently, labels is the only field that can be updated. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.UpdateJobRequest) - ), -) -_sym_db.RegisterMessage(UpdateJobRequest) - -ListJobsResponse = _reflection.GeneratedProtocolMessageType( - "ListJobsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTJOBSRESPONSE, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A list of jobs in a project. - - - Attributes: - jobs: - Output only. Jobs list. - next_page_token: - Optional. This token is included in the response if there are - more results to fetch. To fetch additional results, provide - this value as the ``page_token`` in a subsequent - ListJobsRequest. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ListJobsResponse) - ), -) -_sym_db.RegisterMessage(ListJobsResponse) - -CancelJobRequest = _reflection.GeneratedProtocolMessageType( - "CancelJobRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CANCELJOBREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A request to cancel a job. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - job belongs to. - region: - Required. The Cloud Dataproc region in which to handle the - request. - job_id: - Required. The job ID. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.CancelJobRequest) - ), -) -_sym_db.RegisterMessage(CancelJobRequest) - -DeleteJobRequest = _reflection.GeneratedProtocolMessageType( - "DeleteJobRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEJOBREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A request to delete a job. - - - Attributes: - project_id: - Required. The ID of the Google Cloud Platform project that the - job belongs to. - region: - Required. The Cloud Dataproc region in which to handle the - request. - job_id: - Required. The job ID. 
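The update_mask note above (labels being the only updatable field) translates into a request like the following sketch; the job id and label values are placeholders:

    from google.protobuf import field_mask_pb2

    from google.cloud.dataproc_v1beta2.proto import jobs_pb2

    update_req = jobs_pb2.UpdateJobRequest(
        project_id="example-project",
        region="us-central1",
        job_id="example-job-id",
        job=jobs_pb2.Job(labels={"env": "staging", "starred": ""}),
        # Only the paths listed here are applied to the stored Job.
        update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
    )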
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.DeleteJobRequest) - ), -) -_sym_db.RegisterMessage(DeleteJobRequest) - - -DESCRIPTOR._options = None -_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY._options = None -_HADOOPJOB_PROPERTIESENTRY._options = None -_SPARKJOB_PROPERTIESENTRY._options = None -_PYSPARKJOB_PROPERTIESENTRY._options = None -_PYSPARKJOB.fields_by_name["main_python_file_uri"]._options = None -_QUERYLIST.fields_by_name["queries"]._options = None -_HIVEJOB_SCRIPTVARIABLESENTRY._options = None -_HIVEJOB_PROPERTIESENTRY._options = None -_SPARKSQLJOB_SCRIPTVARIABLESENTRY._options = None -_SPARKSQLJOB_PROPERTIESENTRY._options = None -_PIGJOB_SCRIPTVARIABLESENTRY._options = None -_PIGJOB_PROPERTIESENTRY._options = None -_SPARKRJOB_PROPERTIESENTRY._options = None -_SPARKRJOB.fields_by_name["main_r_file_uri"]._options = None -_JOBPLACEMENT.fields_by_name["cluster_name"]._options = None -_JOBREFERENCE.fields_by_name["project_id"]._options = None -_YARNAPPLICATION.fields_by_name["name"]._options = None -_YARNAPPLICATION.fields_by_name["state"]._options = None -_YARNAPPLICATION.fields_by_name["progress"]._options = None -_YARNAPPLICATION.fields_by_name["tracking_url"]._options = None -_JOB_LABELSENTRY._options = None -_JOB.fields_by_name["placement"]._options = None -_SUBMITJOBREQUEST.fields_by_name["project_id"]._options = None -_SUBMITJOBREQUEST.fields_by_name["region"]._options = None -_SUBMITJOBREQUEST.fields_by_name["job"]._options = None -_GETJOBREQUEST.fields_by_name["project_id"]._options = None -_GETJOBREQUEST.fields_by_name["region"]._options = None -_GETJOBREQUEST.fields_by_name["job_id"]._options = None -_LISTJOBSREQUEST.fields_by_name["project_id"]._options = None -_LISTJOBSREQUEST.fields_by_name["region"]._options = None -_UPDATEJOBREQUEST.fields_by_name["project_id"]._options = None -_UPDATEJOBREQUEST.fields_by_name["region"]._options = None -_UPDATEJOBREQUEST.fields_by_name["job_id"]._options = None -_UPDATEJOBREQUEST.fields_by_name["job"]._options = None -_UPDATEJOBREQUEST.fields_by_name["update_mask"]._options = None -_CANCELJOBREQUEST.fields_by_name["project_id"]._options = None -_CANCELJOBREQUEST.fields_by_name["region"]._options = None -_CANCELJOBREQUEST.fields_by_name["job_id"]._options = None -_DELETEJOBREQUEST.fields_by_name["project_id"]._options = None -_DELETEJOBREQUEST.fields_by_name["region"]._options = None -_DELETEJOBREQUEST.fields_by_name["job_id"]._options = None - -_JOBCONTROLLER = _descriptor.ServiceDescriptor( - name="JobController", - full_name="google.cloud.dataproc.v1beta2.JobController", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" - ), - serialized_start=6411, - serialized_end=7686, - methods=[ - _descriptor.MethodDescriptor( - name="SubmitJob", - full_name="google.cloud.dataproc.v1beta2.JobController.SubmitJob", - index=0, - containing_service=None, - input_type=_SUBMITJOBREQUEST, - output_type=_JOB, - serialized_options=_b( - '\202\323\344\223\002@";/v1beta2/projects/{project_id}/regions/{region}/jobs:submit:\001*\332A\027project_id, region, job' - ), - ), - _descriptor.MethodDescriptor( - name="GetJob", - full_name="google.cloud.dataproc.v1beta2.JobController.GetJob", - index=1, - containing_service=None, - input_type=_GETJOBREQUEST, - output_type=_JOB, - serialized_options=_b( - "\202\323\344\223\002?\022=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\332A\032project_id, region, job_id" - ), - ), - 
_descriptor.MethodDescriptor( - name="ListJobs", - full_name="google.cloud.dataproc.v1beta2.JobController.ListJobs", - index=2, - containing_service=None, - input_type=_LISTJOBSREQUEST, - output_type=_LISTJOBSRESPONSE, - serialized_options=_b( - "\202\323\344\223\0026\0224/v1beta2/projects/{project_id}/regions/{region}/jobs\332A\022project_id, region\332A\032project_id, region, filter" - ), - ), - _descriptor.MethodDescriptor( - name="UpdateJob", - full_name="google.cloud.dataproc.v1beta2.JobController.UpdateJob", - index=3, - containing_service=None, - input_type=_UPDATEJOBREQUEST, - output_type=_JOB, - serialized_options=_b( - "\202\323\344\223\002D2=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:\003job" - ), - ), - _descriptor.MethodDescriptor( - name="CancelJob", - full_name="google.cloud.dataproc.v1beta2.JobController.CancelJob", - index=4, - containing_service=None, - input_type=_CANCELJOBREQUEST, - output_type=_JOB, - serialized_options=_b( - '\202\323\344\223\002I"D/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\001*\332A\032project_id, region, job_id' - ), - ), - _descriptor.MethodDescriptor( - name="DeleteJob", - full_name="google.cloud.dataproc.v1beta2.JobController.DeleteJob", - index=5, - containing_service=None, - input_type=_DELETEJOBREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002?*=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\332A\032project_id, region, job_id" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_JOBCONTROLLER) - -DESCRIPTOR.services_by_name["JobController"] = _JOBCONTROLLER - -# @@protoc_insertion_point(module_scope) diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py deleted file mode 100644 index 9a07fdbb8aaa..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py +++ /dev/null @@ -1,140 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.dataproc_v1beta2.proto import ( - jobs_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class JobControllerStub(object): - """The JobController provides methods to manage jobs. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.SubmitJob = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.JobController/SubmitJob", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.SubmitJobRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.Job.FromString, - ) - self.GetJob = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.JobController/GetJob", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.GetJobRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.Job.FromString, - ) - self.ListJobs = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.JobController/ListJobs", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.ListJobsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.ListJobsResponse.FromString, - ) - self.UpdateJob = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.JobController/UpdateJob", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.UpdateJobRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.Job.FromString, - ) - self.CancelJob = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.JobController/CancelJob", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.CancelJobRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.Job.FromString, - ) - self.DeleteJob = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.JobController/DeleteJob", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.DeleteJobRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - -class JobControllerServicer(object): - """The JobController provides methods to manage jobs. - """ - - def SubmitJob(self, request, context): - """Submits a job to a cluster. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetJob(self, request, context): - """Gets the resource representation for a job in a project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListJobs(self, request, context): - """Lists regions/{region}/jobs in a project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateJob(self, request, context): - """Updates a job in a project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CancelJob(self, request, context): - """Starts a job cancellation request. To access the job resource - after cancellation, call - [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list) - or - [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get). 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteJob(self, request, context): - """Deletes the job from the project. If the job is active, the delete fails, - and the response returns `FAILED_PRECONDITION`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_JobControllerServicer_to_server(servicer, server): - rpc_method_handlers = { - "SubmitJob": grpc.unary_unary_rpc_method_handler( - servicer.SubmitJob, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.SubmitJobRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.Job.SerializeToString, - ), - "GetJob": grpc.unary_unary_rpc_method_handler( - servicer.GetJob, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.GetJobRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.Job.SerializeToString, - ), - "ListJobs": grpc.unary_unary_rpc_method_handler( - servicer.ListJobs, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.ListJobsRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.ListJobsResponse.SerializeToString, - ), - "UpdateJob": grpc.unary_unary_rpc_method_handler( - servicer.UpdateJob, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.UpdateJobRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.Job.SerializeToString, - ), - "CancelJob": grpc.unary_unary_rpc_method_handler( - servicer.CancelJob, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.CancelJobRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.Job.SerializeToString, - ), - "DeleteJob": grpc.unary_unary_rpc_method_handler( - servicer.DeleteJob, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.DeleteJobRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.dataproc.v1beta2.JobController", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/operations.proto b/dataproc/google/cloud/dataproc_v1beta2/proto/operations.proto deleted file mode 100644 index 74cbde3cac69..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/operations.proto +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -syntax = "proto3"; - -package google.cloud.dataproc.v1beta2; - -import "google/protobuf/timestamp.proto"; -import "google/api/annotations.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "OperationsProto"; -option java_package = "com.google.cloud.dataproc.v1beta2"; - -// The status of the operation. -message ClusterOperationStatus { - // The operation state. - enum State { - // Unused. - UNKNOWN = 0; - - // The operation has been created. - PENDING = 1; - - // The operation is running. - RUNNING = 2; - - // The operation is done; either cancelled or completed. - DONE = 3; - } - - // Output only. A message containing the operation state. - State state = 1; - - // Output only. A message containing the detailed operation state. - string inner_state = 2; - - // Output only. A message containing any operation metadata details. - string details = 3; - - // Output only. The time this state was entered. - google.protobuf.Timestamp state_start_time = 4; -} - -// Metadata describing the operation. -message ClusterOperationMetadata { - // Output only. Name of the cluster for the operation. - string cluster_name = 7; - - // Output only. Cluster UUID for the operation. - string cluster_uuid = 8; - - // Output only. Current operation status. - ClusterOperationStatus status = 9; - - // Output only. The previous operation status. - repeated ClusterOperationStatus status_history = 10; - - // Output only. The operation type. - string operation_type = 11; - - // Output only. Short description of operation. - string description = 12; - - // Output only. Labels associated with the operation - map labels = 13; - - // Output only. Errors encountered during operation execution. - repeated string warnings = 14; -} diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2.py deleted file mode 100644 index 8a1d63b987b2..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2.py +++ /dev/null @@ -1,471 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/dataproc_v1beta2/proto/operations.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/dataproc_v1beta2/proto/operations.proto", - package="google.cloud.dataproc.v1beta2", - syntax="proto3", - serialized_options=_b( - "\n!com.google.cloud.dataproc.v1beta2B\017OperationsProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" - ), - serialized_pb=_b( - '\n4google/cloud/dataproc_v1beta2/proto/operations.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\xfa\x01\n\x16\x43lusterOperationStatus\x12J\n\x05state\x18\x01 \x01(\x0e\x32;.google.cloud.dataproc.v1beta2.ClusterOperationStatus.State\x12\x13\n\x0binner_state\x18\x02 \x01(\t\x12\x0f\n\x07\x64\x65tails\x18\x03 \x01(\t\x12\x34\n\x10state_start_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"\x9f\x03\n\x18\x43lusterOperationMetadata\x12\x14\n\x0c\x63luster_name\x18\x07 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x08 \x01(\t\x12\x45\n\x06status\x18\t \x01(\x0b\x32\x35.google.cloud.dataproc.v1beta2.ClusterOperationStatus\x12M\n\x0estatus_history\x18\n \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.ClusterOperationStatus\x12\x16\n\x0eoperation_type\x18\x0b \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x0c \x01(\t\x12S\n\x06labels\x18\r \x03(\x0b\x32\x43.google.cloud.dataproc.v1beta2.ClusterOperationMetadata.LabelsEntry\x12\x10\n\x08warnings\x18\x0e \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42}\n!com.google.cloud.dataproc.v1beta2B\x0fOperationsProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' - ), - dependencies=[ - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_CLUSTEROPERATIONSTATUS_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationStatus.State", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="UNKNOWN", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PENDING", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DONE", index=3, number=3, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=345, - serialized_end=401, -) -_sym_db.RegisterEnumDescriptor(_CLUSTEROPERATIONSTATUS_STATE) - - -_CLUSTEROPERATIONSTATUS = _descriptor.Descriptor( - name="ClusterOperationStatus", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationStatus", - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationStatus.state", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="inner_state", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationStatus.inner_state", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="details", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationStatus.details", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="state_start_time", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationStatus.state_start_time", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_CLUSTEROPERATIONSTATUS_STATE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=151, - serialized_end=401, -) - - -_CLUSTEROPERATIONMETADATA_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationMetadata.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationMetadata.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationMetadata.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=774, - serialized_end=819, -) - -_CLUSTEROPERATIONMETADATA = _descriptor.Descriptor( - name="ClusterOperationMetadata", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="cluster_name", - 
full_name="google.cloud.dataproc.v1beta2.ClusterOperationMetadata.cluster_name", - index=0, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_uuid", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationMetadata.cluster_uuid", - index=1, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationMetadata.status", - index=2, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status_history", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationMetadata.status_history", - index=3, - number=10, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="operation_type", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationMetadata.operation_type", - index=4, - number=11, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationMetadata.description", - index=5, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationMetadata.labels", - index=6, - number=13, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="warnings", - full_name="google.cloud.dataproc.v1beta2.ClusterOperationMetadata.warnings", - index=7, - number=14, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_CLUSTEROPERATIONMETADATA_LABELSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=404, - serialized_end=819, -) - -_CLUSTEROPERATIONSTATUS.fields_by_name[ - "state" -].enum_type = 
_CLUSTEROPERATIONSTATUS_STATE -_CLUSTEROPERATIONSTATUS.fields_by_name[ - "state_start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_CLUSTEROPERATIONSTATUS_STATE.containing_type = _CLUSTEROPERATIONSTATUS -_CLUSTEROPERATIONMETADATA_LABELSENTRY.containing_type = _CLUSTEROPERATIONMETADATA -_CLUSTEROPERATIONMETADATA.fields_by_name[ - "status" -].message_type = _CLUSTEROPERATIONSTATUS -_CLUSTEROPERATIONMETADATA.fields_by_name[ - "status_history" -].message_type = _CLUSTEROPERATIONSTATUS -_CLUSTEROPERATIONMETADATA.fields_by_name[ - "labels" -].message_type = _CLUSTEROPERATIONMETADATA_LABELSENTRY -DESCRIPTOR.message_types_by_name["ClusterOperationStatus"] = _CLUSTEROPERATIONSTATUS -DESCRIPTOR.message_types_by_name["ClusterOperationMetadata"] = _CLUSTEROPERATIONMETADATA -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ClusterOperationStatus = _reflection.GeneratedProtocolMessageType( - "ClusterOperationStatus", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTEROPERATIONSTATUS, - __module__="google.cloud.dataproc_v1beta2.proto.operations_pb2", - __doc__="""The status of the operation. - - - Attributes: - state: - Output only. A message containing the operation state. - inner_state: - Output only. A message containing the detailed operation - state. - details: - Output only. A message containing any operation metadata - details. - state_start_time: - Output only. The time this state was entered. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ClusterOperationStatus) - ), -) -_sym_db.RegisterMessage(ClusterOperationStatus) - -ClusterOperationMetadata = _reflection.GeneratedProtocolMessageType( - "ClusterOperationMetadata", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTEROPERATIONMETADATA_LABELSENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.operations_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ClusterOperationMetadata.LabelsEntry) - ), - ), - DESCRIPTOR=_CLUSTEROPERATIONMETADATA, - __module__="google.cloud.dataproc_v1beta2.proto.operations_pb2", - __doc__="""Metadata describing the operation. - - - Attributes: - cluster_name: - Output only. Name of the cluster for the operation. - cluster_uuid: - Output only. Cluster UUID for the operation. - status: - Output only. Current operation status. - status_history: - Output only. The previous operation status. - operation_type: - Output only. The operation type. - description: - Output only. Short description of operation. - labels: - Output only. Labels associated with the operation - warnings: - Output only. Errors encountered during operation execution. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ClusterOperationMetadata) - ), -) -_sym_db.RegisterMessage(ClusterOperationMetadata) -_sym_db.RegisterMessage(ClusterOperationMetadata.LabelsEntry) - - -DESCRIPTOR._options = None -_CLUSTEROPERATIONMETADATA_LABELSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-import grpc diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/shared.proto b/dataproc/google/cloud/dataproc_v1beta2/proto/shared.proto deleted file mode 100644 index de1130d9c0b1..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/shared.proto +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.dataproc.v1beta2; - -import "google/api/annotations.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "SharedProto"; -option java_package = "com.google.cloud.dataproc.v1beta2"; - -// Cluster components that can be activated. -enum Component { - // Unspecified component. - COMPONENT_UNSPECIFIED = 0; - - // The Anaconda python distribution. - ANACONDA = 5; - - // The Druid query engine. - DRUID = 9; - - // The Hive Web HCatalog (the REST service for accessing HCatalog). - HIVE_WEBHCAT = 3; - - // The Jupyter Notebook. - JUPYTER = 1; - - // The Kerberos security feature. - KERBEROS = 7; - - // The Presto query engine. - PRESTO = 6; - - // The Zeppelin notebook. - ZEPPELIN = 4; - - // The Zookeeper service. - ZOOKEEPER = 8; -} diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/shared_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/shared_pb2.py deleted file mode 100644 index 391bfa139f90..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/shared_pb2.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/dataproc_v1beta2/proto/shared.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/dataproc_v1beta2/proto/shared.proto", - package="google.cloud.dataproc.v1beta2", - syntax="proto3", - serialized_options=_b( - "\n!com.google.cloud.dataproc.v1beta2B\013SharedProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" - ), - serialized_pb=_b( - "\n0google/cloud/dataproc_v1beta2/proto/shared.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto*\x95\x01\n\tComponent\x12\x19\n\x15\x43OMPONENT_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x41NACONDA\x10\x05\x12\t\n\x05\x44RUID\x10\t\x12\x10\n\x0cHIVE_WEBHCAT\x10\x03\x12\x0b\n\x07JUPYTER\x10\x01\x12\x0c\n\x08KERBEROS\x10\x07\x12\n\n\x06PRESTO\x10\x06\x12\x0c\n\x08ZEPPELIN\x10\x04\x12\r\n\tZOOKEEPER\x10\x08\x42y\n!com.google.cloud.dataproc.v1beta2B\x0bSharedProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3" - ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], -) - -_COMPONENT = _descriptor.EnumDescriptor( - name="Component", - full_name="google.cloud.dataproc.v1beta2.Component", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="COMPONENT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ANACONDA", index=1, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DRUID", index=2, number=9, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="HIVE_WEBHCAT", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="JUPYTER", index=4, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="KERBEROS", index=5, number=7, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PRESTO", index=6, number=6, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ZEPPELIN", index=7, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ZOOKEEPER", index=8, number=8, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=114, - serialized_end=263, -) -_sym_db.RegisterEnumDescriptor(_COMPONENT) - -Component = enum_type_wrapper.EnumTypeWrapper(_COMPONENT) -COMPONENT_UNSPECIFIED = 0 -ANACONDA = 5 -DRUID = 9 -HIVE_WEBHCAT = 3 -JUPYTER = 1 -KERBEROS = 7 -PRESTO = 6 -ZEPPELIN = 4 -ZOOKEEPER = 8 - - -DESCRIPTOR.enum_types_by_name["Component"] = _COMPONENT -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/shared_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1beta2/proto/shared_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- 
a/dataproc/google/cloud/dataproc_v1beta2/proto/shared_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto deleted file mode 100644 index 2979593ddc22..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto +++ /dev/null @@ -1,787 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.dataproc.v1beta2; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/dataproc/v1beta2/clusters.proto"; -import "google/cloud/dataproc/v1beta2/jobs.proto"; -import "google/longrunning/operations.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "WorkflowTemplatesProto"; -option java_package = "com.google.cloud.dataproc.v1beta2"; - -// The API interface for managing Workflow Templates in the -// Cloud Dataproc API. -service WorkflowTemplateService { - option (google.api.default_host) = "dataproc.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates new workflow template. - rpc CreateWorkflowTemplate(CreateWorkflowTemplateRequest) returns (WorkflowTemplate) { - option (google.api.http) = { - post: "/v1beta2/{parent=projects/*/regions/*}/workflowTemplates" - body: "template" - additional_bindings { - post: "/v1beta2/{parent=projects/*/locations/*}/workflowTemplates" - body: "template" - } - }; - option (google.api.method_signature) = "parent, template"; - } - - // Retrieves the latest workflow template. - // - // Can retrieve previously instantiated template by specifying optional - // version parameter. - rpc GetWorkflowTemplate(GetWorkflowTemplateRequest) returns (WorkflowTemplate) { - option (google.api.http) = { - get: "/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}" - additional_bindings { - get: "/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}" - } - }; - option (google.api.method_signature) = "name"; - } - - // Instantiates a template and begins execution. - // - // The returned Operation can be used to track execution of - // workflow by polling - // [operations.get][google.longrunning.Operations.GetOperation]. - // The Operation will complete when entire workflow is finished. - // - // The running workflow can be aborted via - // [operations.cancel][google.longrunning.Operations.CancelOperation]. - // This will cause any inflight jobs to be cancelled and workflow-owned - // clusters to be deleted. 
- // - // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#workflowmetadata). - // Also see [Using - // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). - // - // On successful completion, - // [Operation.response][google.longrunning.Operation.response] will be - // [Empty][google.protobuf.Empty]. - rpc InstantiateWorkflowTemplate(InstantiateWorkflowTemplateRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}:instantiate" - body: "*" - additional_bindings { - post: "/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}:instantiate" - body: "*" - } - }; - option (google.api.method_signature) = "name"; - option (google.api.method_signature) = "name, parameters"; - option (google.longrunning.operation_info) = { - response_type: "google.protobuf.Empty" - metadata_type: "WorkflowMetadata" - }; - } - - // Instantiates a template and begins execution. - // - // This method is equivalent to executing the sequence - // [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate], - // [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate]. - // - // The returned Operation can be used to track execution of - // workflow by polling - // [operations.get][google.longrunning.Operations.GetOperation]. - // The Operation will complete when entire workflow is finished. - // - // The running workflow can be aborted via - // [operations.cancel][google.longrunning.Operations.CancelOperation]. - // This will cause any inflight jobs to be cancelled and workflow-owned - // clusters to be deleted. - // - // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). - // Also see [Using - // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). - // - // On successful completion, - // [Operation.response][google.longrunning.Operation.response] will be - // [Empty][google.protobuf.Empty]. - rpc InstantiateInlineWorkflowTemplate(InstantiateInlineWorkflowTemplateRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline" - body: "template" - additional_bindings { - post: "/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline" - body: "template" - } - }; - option (google.api.method_signature) = "parent, template"; - option (google.longrunning.operation_info) = { - response_type: "google.protobuf.Empty" - metadata_type: "WorkflowMetadata" - }; - } - - // Updates (replaces) workflow template. The updated template - // must contain version that matches the current server version. 
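As a rough sketch of the instantiation flow the comments above describe (both instantiate RPCs return a google.longrunning.Operation whose metadata is WorkflowMetadata and whose response is Empty): the ids and the parameter value below are invented, and the method and helper names reflect the v1beta2 GAPIC surface being removed in this patch as best recalled, so treat them as assumptions rather than a definitive reference.

from google.cloud import dataproc_v1beta2

# Client for the WorkflowTemplateService defined in this proto file.
client = dataproc_v1beta2.WorkflowTemplateServiceClient()

# Hypothetical project, region, and template id, for illustration only.
name = client.workflow_template_path("my-project", "us-central1", "my-template")

# Kicks off the workflow. The returned object wraps the long-running
# Operation described above (metadata: WorkflowMetadata, response: Empty).
operation = client.instantiate_workflow_template(
    name, parameters={"CLUSTER_ZONE": "us-central1-a"}
)
operation.result()   # polls operations.get until the whole workflow finishes
# operation.cancel() would issue operations.cancel, aborting in-flight jobs
# and deleting any workflow-owned cluster, as noted in the comments above.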
- rpc UpdateWorkflowTemplate(UpdateWorkflowTemplateRequest) returns (WorkflowTemplate) { - option (google.api.http) = { - put: "/v1beta2/{template.name=projects/*/regions/*/workflowTemplates/*}" - body: "template" - additional_bindings { - put: "/v1beta2/{template.name=projects/*/locations/*/workflowTemplates/*}" - body: "template" - } - }; - option (google.api.method_signature) = "template"; - } - - // Lists workflows that match the specified filter in the request. - rpc ListWorkflowTemplates(ListWorkflowTemplatesRequest) returns (ListWorkflowTemplatesResponse) { - option (google.api.http) = { - get: "/v1beta2/{parent=projects/*/regions/*}/workflowTemplates" - additional_bindings { - get: "/v1beta2/{parent=projects/*/locations/*}/workflowTemplates" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a workflow template. It does not cancel in-progress workflows. - rpc DeleteWorkflowTemplate(DeleteWorkflowTemplateRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}" - additional_bindings { - delete: "/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}" - } - }; - option (google.api.method_signature) = "name"; - } -} - -// A Cloud Dataproc workflow template resource. -message WorkflowTemplate { - option (google.api.resource) = { - type: "dataproc.googleapis.com/WorkflowTemplate" - pattern: "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}" - pattern: "projects/{project}/locations/{location}/workflowTemplates/{workflow_template}" - history: ORIGINALLY_SINGLE_PATTERN - }; - - // Required. The template id. - // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). Cannot begin or end with underscore - // or hyphen. Must consist of between 3 and 50 characters. - // - // . - string id = 2 [(google.api.field_behavior) = REQUIRED]; - - // Output only. The resource name of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.workflowTemplates`, the resource name of the - // template has the following format: - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - // - // * For `projects.locations.workflowTemplates`, the resource name of the - // template has the following format: - // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. Used to perform a consistent read-modify-write. - // - // This field should be left blank for a `CreateWorkflowTemplate` request. It - // is required for an `UpdateWorkflowTemplate` request, and must match the - // current server version. A typical update template flow would fetch the - // current template with a `GetWorkflowTemplate` request, which will return - // the current template with the `version` field filled in with the - // current server version. The user updates other fields in the template, - // then returns it as part of the `UpdateWorkflowTemplate` request. - int32 version = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. The time template was created. - google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The time template was last updated. - google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. 
The labels to associate with this template. These labels - // will be propagated to all jobs and clusters created by the workflow - // instance. - // - // Label **keys** must contain 1 to 63 characters, and must conform to - // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). - // - // Label **values** may be empty, but, if present, must contain 1 to 63 - // characters, and must conform to - // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). - // - // No more than 32 labels can be associated with a template. - map labels = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Required. WorkflowTemplate scheduling information. - WorkflowTemplatePlacement placement = 7; - - // Required. The Directed Acyclic Graph of Jobs to submit. - repeated OrderedJob jobs = 8; - - // Optional. Template parameters whose values are substituted into the - // template. Values for parameters must be provided when the template is - // instantiated. - repeated TemplateParameter parameters = 9 [(google.api.field_behavior) = OPTIONAL]; -} - -// Specifies workflow execution target. -// -// Either `managed_cluster` or `cluster_selector` is required. -message WorkflowTemplatePlacement { - // Required. Specifies where workflow executes; either on a managed - // cluster or an existing cluster chosen by labels. - oneof placement { - // Optional. A cluster that is managed by the workflow. - ManagedCluster managed_cluster = 1; - - // Optional. A selector that chooses target cluster for jobs based - // on metadata. - // - // The selector is evaluated at the time each job is submitted. - ClusterSelector cluster_selector = 2; - } -} - -// Cluster that is managed by the workflow. -message ManagedCluster { - // Required. The cluster name prefix. A unique cluster name will be formed by - // appending a random suffix. - // - // The name must contain only lower-case letters (a-z), numbers (0-9), - // and hyphens (-). Must begin with a letter. Cannot begin or end with - // hyphen. Must consist of between 2 and 35 characters. - string cluster_name = 2; - - // Required. The cluster configuration. - ClusterConfig config = 3; - - // Optional. The labels to associate with this cluster. - // - // Label keys must be between 1 and 63 characters long, and must conform to - // the following PCRE regular expression: - // [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} - // - // Label values must be between 1 and 63 characters long, and must conform to - // the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} - // - // No more than 32 labels can be associated with a given cluster. - map labels = 4; -} - -// A selector that chooses target cluster for jobs based on metadata. -message ClusterSelector { - // Optional. The zone where workflow process executes. This parameter does not - // affect the selection of the cluster. - // - // If unspecified, the zone of the first cluster matching the selector - // is used. - string zone = 1; - - // Required. The cluster labels. Cluster must have all labels - // to match. - map cluster_labels = 2; -} - -// A job executed by the workflow. -message OrderedJob { - // Required. The step id. The id must be unique among all jobs - // within the template. - // - // The step id is used as prefix for job id, as job - // `goog-dataproc-workflow-step-id` label, and in - // [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] field from other - // steps. - // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). 
Cannot begin or end with underscore - // or hyphen. Must consist of between 3 and 50 characters. - string step_id = 1; - - // Required. The job definition. - oneof job_type { - // Job is a Hadoop job. - HadoopJob hadoop_job = 2; - - // Job is a Spark job. - SparkJob spark_job = 3; - - // Job is a Pyspark job. - PySparkJob pyspark_job = 4; - - // Job is a Hive job. - HiveJob hive_job = 5; - - // Job is a Pig job. - PigJob pig_job = 6; - - // Job is a SparkSql job. - SparkSqlJob spark_sql_job = 7; - } - - // Optional. The labels to associate with this job. - // - // Label keys must be between 1 and 63 characters long, and must conform to - // the following regular expression: - // [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} - // - // Label values must be between 1 and 63 characters long, and must conform to - // the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} - // - // No more than 32 labels can be associated with a given job. - map labels = 8; - - // Optional. Job scheduling configuration. - JobScheduling scheduling = 9; - - // Optional. The optional list of prerequisite job step_ids. - // If not specified, the job will start at the beginning of workflow. - repeated string prerequisite_step_ids = 10; -} - -// A configurable parameter that replaces one or more fields in the template. -// Parameterizable fields: -// - Labels -// - File uris -// - Job properties -// - Job arguments -// - Script variables -// - Main class (in HadoopJob and SparkJob) -// - Zone (in ClusterSelector) -message TemplateParameter { - // Required. Parameter name. - // The parameter name is used as the key, and paired with the - // parameter value, which are passed to the template when the template - // is instantiated. - // The name must contain only capital letters (A-Z), numbers (0-9), and - // underscores (_), and must not start with a number. The maximum length is - // 40 characters. - string name = 1; - - // Required. Paths to all fields that the parameter replaces. - // A field is allowed to appear in at most one parameter's list of field - // paths. - // - // A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - // For example, a field path that references the zone field of a workflow - // template's cluster selector would be specified as - // `placement.clusterSelector.zone`. 
- // - // Also, field paths can reference fields using the following syntax: - // - // * Values in maps can be referenced by key: - // * labels['key'] - // * placement.clusterSelector.clusterLabels['key'] - // * placement.managedCluster.labels['key'] - // * placement.clusterSelector.clusterLabels['key'] - // * jobs['step-id'].labels['key'] - // - // * Jobs in the jobs list can be referenced by step-id: - // * jobs['step-id'].hadoopJob.mainJarFileUri - // * jobs['step-id'].hiveJob.queryFileUri - // * jobs['step-id'].pySparkJob.mainPythonFileUri - // * jobs['step-id'].hadoopJob.jarFileUris[0] - // * jobs['step-id'].hadoopJob.archiveUris[0] - // * jobs['step-id'].hadoopJob.fileUris[0] - // * jobs['step-id'].pySparkJob.pythonFileUris[0] - // - // * Items in repeated fields can be referenced by a zero-based index: - // * jobs['step-id'].sparkJob.args[0] - // - // * Other examples: - // * jobs['step-id'].hadoopJob.properties['key'] - // * jobs['step-id'].hadoopJob.args[0] - // * jobs['step-id'].hiveJob.scriptVariables['key'] - // * jobs['step-id'].hadoopJob.mainJarFileUri - // * placement.clusterSelector.zone - // - // It may not be possible to parameterize maps and repeated fields in their - // entirety since only individual map values and individual items in repeated - // fields can be referenced. For example, the following field paths are - // invalid: - // - // - placement.clusterSelector.clusterLabels - // - jobs['step-id'].sparkJob.args - repeated string fields = 2; - - // Optional. Brief description of the parameter. - // Must not exceed 1024 characters. - string description = 3; - - // Optional. Validation rules to be applied to this parameter's value. - ParameterValidation validation = 4; -} - -// Configuration for parameter validation. -message ParameterValidation { - // Required. The type of validation to be performed. - oneof validation_type { - // Validation based on regular expressions. - RegexValidation regex = 1; - - // Validation based on a list of allowed values. - ValueValidation values = 2; - } -} - -// Validation based on regular expressions. -message RegexValidation { - // Required. RE2 regular expressions used to validate the parameter's value. - // The value must match the regex in its entirety (substring - // matches are not sufficient). - repeated string regexes = 1; -} - -// Validation based on a list of allowed values. -message ValueValidation { - // Required. List of allowed values for the parameter. - repeated string values = 1; -} - -// A Cloud Dataproc workflow template resource. -message WorkflowMetadata { - // The operation state. - enum State { - // Unused. - UNKNOWN = 0; - - // The operation has been created. - PENDING = 1; - - // The operation is running. - RUNNING = 2; - - // The operation is done; either cancelled or completed. - DONE = 3; - } - - // Output only. The resource name of the workflow template as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.workflowTemplates`, the resource name of the - // template has the following format: - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - // - // * For `projects.locations.workflowTemplates`, the resource name of the - // template has the following format: - // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - string template = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The version of template at the time of - // workflow instantiation. 
- int32 version = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The create cluster operation metadata. - ClusterOperation create_cluster = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The workflow graph. - WorkflowGraph graph = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The delete cluster operation metadata. - ClusterOperation delete_cluster = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The workflow state. - State state = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The name of the target cluster. - string cluster_name = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Map from parameter names to values that were used for those parameters. - map parameters = 8; - - // Output only. Workflow start time. - google.protobuf.Timestamp start_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Workflow end time. - google.protobuf.Timestamp end_time = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The UUID of target cluster. - string cluster_uuid = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// The cluster operation triggered by a workflow. -message ClusterOperation { - // Output only. The id of the cluster operation. - string operation_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Error, if operation failed. - string error = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Indicates the operation is done. - bool done = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// The workflow graph. -message WorkflowGraph { - // Output only. The workflow nodes. - repeated WorkflowNode nodes = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// The workflow node. -message WorkflowNode { - // The workflow node state. - enum NodeState { - // State is unspecified. - NODE_STATUS_UNSPECIFIED = 0; - - // The node is awaiting prerequisite node to finish. - BLOCKED = 1; - - // The node is runnable but not running. - RUNNABLE = 2; - - // The node is running. - RUNNING = 3; - - // The node completed successfully. - COMPLETED = 4; - - // The node failed. A node can be marked FAILED because - // its ancestor or peer failed. - FAILED = 5; - } - - // Output only. The name of the node. - string step_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Node's prerequisite nodes. - repeated string prerequisite_step_ids = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The job id; populated after the node enters RUNNING state. - string job_id = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The node state. - NodeState state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The error detail. - string error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A request to create a workflow template. -message CreateWorkflowTemplateRequest { - // Required. The resource name of the region or location, as described - // in https://cloud.google.com/apis/design/resource_names. 
- // - // * For `projects.regions.workflowTemplates,create`, the resource name of the - // region has the following format: - // `projects/{project_id}/regions/{region}` - // - // * For `projects.locations.workflowTemplates.create`, the resource name of - // the location has the following format: - // `projects/{project_id}/locations/{location}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Required. The Dataproc workflow template to create. - WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to fetch a workflow template. -message GetWorkflowTemplateRequest { - // Required. The resource name of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.workflowTemplates.get`, the resource name of the - // template has the following format: - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - // - // * For `projects.locations.workflowTemplates.get`, the resource name of the - // template has the following format: - // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Optional. The version of workflow template to retrieve. Only previously - // instantiated versions can be retrieved. - // - // If unspecified, retrieves the current version. - int32 version = 2; -} - -// A request to instantiate a workflow template. -message InstantiateWorkflowTemplateRequest { - // Required. The resource name of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.workflowTemplates.instantiate`, the resource name - // of the template has the following format: - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - // - // * For `projects.locations.workflowTemplates.instantiate`, the resource name - // of the template has the following format: - // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Optional. The version of workflow template to instantiate. If specified, - // the workflow will be instantiated only if the current version of - // the workflow template has the supplied version. - // - // This option cannot be used to instantiate a previous version of - // workflow template. - int32 version = 2; - - // Deprecated. Please use `request_id` field instead. - string instance_id = 3 [deprecated = true]; - - // Optional. A tag that prevents multiple concurrent workflow - // instances with the same tag from running. This mitigates risk of - // concurrent instances started due to retries. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The tag must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 5; - - // Optional. Map from parameter names to values that should be used for those - // parameters. Values may not exceed 100 characters. 
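A companion sketch for CreateWorkflowTemplateRequest: the removed client also accepted plain dicts shaped like the WorkflowTemplate and OrderedJob messages above. Every value below (project, region, template id, cluster name, jar URI, arguments) is invented for illustration, and the dict-based calling convention is an assumption about the old GAPIC surface rather than a definitive example.

from google.cloud import dataproc_v1beta2

client = dataproc_v1beta2.WorkflowTemplateServiceClient()
parent = "projects/my-project/regions/us-central1"  # hypothetical project/region

# Dict keys mirror the WorkflowTemplate / OrderedJob fields defined above.
template = {
    "id": "my-template",
    "placement": {
        "managed_cluster": {
            "cluster_name": "wf-cluster",  # prefix; a random suffix is appended
            "config": {},                  # ClusterConfig, left empty for brevity
        }
    },
    "jobs": [
        {
            "step_id": "teragen",
            "hadoop_job": {
                "main_jar_file_uri": "file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",
                "args": ["teragen", "1000", "hdfs:///gen/"],
            },
        }
    ],
}

created = client.create_workflow_template(parent, template)
print(created.name, created.version)  # filled in by the server on creation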
- map parameters = 4; -} - -// A request to instantiate an inline workflow template. -message InstantiateInlineWorkflowTemplateRequest { - // Required. The resource name of the region or location, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.workflowTemplates,instantiateinline`, the resource - // name of the region has the following format: - // `projects/{project_id}/regions/{region}` - // - // * For `projects.locations.workflowTemplates.instantiateinline`, the - // resource name of the location has the following format: - // `projects/{project_id}/locations/{location}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Required. The workflow template to instantiate. - WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED]; - - // Deprecated. Please use `request_id` field instead. - string instance_id = 3; - - // Optional. A tag that prevents multiple concurrent workflow - // instances with the same tag from running. This mitigates risk of - // concurrent instances started due to retries. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The tag must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 4; -} - -// A request to update a workflow template. -message UpdateWorkflowTemplateRequest { - // Required. The updated workflow template. - // - // The `template.version` field must match the current version. - WorkflowTemplate template = 1 [ - (google.api.field_behavior) = REQUIRED - ]; -} - -// A request to list workflow templates in a project. -message ListWorkflowTemplatesRequest { - // Required. The resource name of the region or location, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.workflowTemplates,list`, the resource - // name of the region has the following format: - // `projects/{project_id}/regions/{region}` - // - // * For `projects.locations.workflowTemplates.list`, the - // resource name of the location has the following format: - // `projects/{project_id}/locations/{location}` - string parent = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The maximum number of results to return in each response. - int32 page_size = 2; - - // Optional. The page token, returned by a previous call, to request the - // next page of results. - string page_token = 3; -} - -// A response to a request to list workflow templates in a project. -message ListWorkflowTemplatesResponse { - // Output only. WorkflowTemplates list. - repeated WorkflowTemplate templates = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. This token is included in the response if there are more - // results to fetch. To fetch additional results, provide this value as the - // page_token in a subsequent ListWorkflowTemplatesRequest. - string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A request to delete a workflow template. -// -// Currently started workflows will remain running. -message DeleteWorkflowTemplateRequest { - // Required. The resource name of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names. 
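For the ListWorkflowTemplatesRequest/Response pair above, the removed client returned a pager that handled the page_token plumbing; a brief sketch (the parent path is invented, and the iteration pattern is the usual old-GAPIC one, stated here as an assumption):

from google.cloud import dataproc_v1beta2

client = dataproc_v1beta2.WorkflowTemplateServiceClient()
parent = "projects/my-project/regions/us-central1"  # hypothetical

# The iterator transparently fetches subsequent pages via next_page_token.
for template in client.list_workflow_templates(parent, page_size=50):
    print(template.name, template.version)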
- // - // * For `projects.regions.workflowTemplates.delete`, the resource name - // of the template has the following format: - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - // - // * For `projects.locations.workflowTemplates.instantiate`, the resource name - // of the template has the following format: - // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Optional. The version of workflow template to delete. If specified, - // will only delete the template if the current server version matches - // specified version. - int32 version = 2; -} diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py deleted file mode 100644 index af679f35ffbf..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py +++ /dev/null @@ -1,3381 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/dataproc_v1beta2/proto/workflow_templates.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.dataproc_v1beta2.proto import ( - clusters_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2, -) -from google.cloud.dataproc_v1beta2.proto import ( - jobs_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/dataproc_v1beta2/proto/workflow_templates.proto", - package="google.cloud.dataproc.v1beta2", - syntax="proto3", - serialized_options=_b( - "\n!com.google.cloud.dataproc.v1beta2B\026WorkflowTemplatesProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" - ), - serialized_pb=_b( - '\n\n\nhadoop_job\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x04 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x05 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x06 \x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12\x43\n\rspark_sql_job\x18\x07 \x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12\x45\n\x06labels\x18\x08 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.OrderedJob.LabelsEntry\x12@\n\nscheduling\x18\t 
\x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobScheduling\x12\x1d\n\x15prerequisite_step_ids\x18\n \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08job_type"\x8e\x01\n\x11TemplateParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x03(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x46\n\nvalidation\x18\x04 \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.ParameterValidation"\xab\x01\n\x13ParameterValidation\x12?\n\x05regex\x18\x01 \x01(\x0b\x32..google.cloud.dataproc.v1beta2.RegexValidationH\x00\x12@\n\x06values\x18\x02 \x01(\x0b\x32..google.cloud.dataproc.v1beta2.ValueValidationH\x00\x42\x11\n\x0fvalidation_type""\n\x0fRegexValidation\x12\x0f\n\x07regexes\x18\x01 \x03(\t"!\n\x0fValueValidation\x12\x0e\n\x06values\x18\x01 \x03(\t"\xc8\x05\n\x10WorkflowMetadata\x12\x15\n\x08template\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x03\x12L\n\x0e\x63reate_cluster\x18\x03 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.ClusterOperationB\x03\xe0\x41\x03\x12@\n\x05graph\x18\x04 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.WorkflowGraphB\x03\xe0\x41\x03\x12L\n\x0e\x64\x65lete_cluster\x18\x05 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.ClusterOperationB\x03\xe0\x41\x03\x12I\n\x05state\x18\x06 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.WorkflowMetadata.StateB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_name\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12S\n\nparameters\x18\x08 \x03(\x0b\x32?.google.cloud.dataproc.v1beta2.WorkflowMetadata.ParametersEntry\x12\x33\n\nstart_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x0b \x01(\tB\x03\xe0\x41\x03\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"T\n\x10\x43lusterOperation\x12\x19\n\x0coperation_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04\x64one\x18\x03 \x01(\x08\x42\x03\xe0\x41\x03"P\n\rWorkflowGraph\x12?\n\x05nodes\x18\x01 \x03(\x0b\x32+.google.cloud.dataproc.v1beta2.WorkflowNodeB\x03\xe0\x41\x03"\xa9\x02\n\x0cWorkflowNode\x12\x14\n\x07step_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12"\n\x15prerequisite_step_ids\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12I\n\x05state\x18\x05 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.WorkflowNode.NodeStateB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x06 \x01(\tB\x03\xe0\x41\x03"k\n\tNodeState\x12\x1b\n\x17NODE_STATUS_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x01\x12\x0c\n\x08RUNNABLE\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05"\xa9\x01\n\x1d\x43reateWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x46\n\x08template\x18\x02 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x02"m\n\x1aGetWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 \x01(\x05"\xbc\x02\n"InstantiateWorkflowTemplateRequest\x12>\n\x04name\x18\x01 
\x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x17\n\x0binstance_id\x18\x03 \x01(\tB\x02\x18\x01\x12\x12\n\nrequest_id\x18\x05 \x01(\t\x12\x65\n\nparameters\x18\x04 \x03(\x0b\x32Q.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.ParametersEntry\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xdd\x01\n(InstantiateInlineWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x46\n\x08template\x18\x02 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x02\x12\x13\n\x0binstance_id\x18\x03 \x01(\t\x12\x12\n\nrequest_id\x18\x04 \x01(\t"g\n\x1dUpdateWorkflowTemplateRequest\x12\x46\n\x08template\x18\x01 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x02"Z\n\x1cListWorkflowTemplatesRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"\x86\x01\n\x1dListWorkflowTemplatesResponse\x12G\n\ttemplates\x18\x01 \x03(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"p\n\x1d\x44\x65leteWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 \x01(\x05\x32\xe9\x11\n\x17WorkflowTemplateService\x12\xb0\x02\n\x16\x43reateWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\xa6\x01\x82\xd3\xe4\x93\x02\x8c\x01"8/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:\x08templateZF":/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:\x08template\xda\x41\x10parent, template\x12\x89\x02\n\x13GetWorkflowTemplate\x12\x39.google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\x85\x01\x82\xd3\xe4\x93\x02x\x12\x38/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<\x12:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\xda\x41\x04name\x12\xe5\x02\n\x1bInstantiateWorkflowTemplate\x12\x41.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xe3\x01\x82\xd3\xe4\x93\x02\x96\x01"D/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\x01*ZK"F/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\x01*\xda\x41\x04name\xda\x41\x10name, parameters\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\x84\x03\n!InstantiateInlineWorkflowTemplate\x12G.google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xf6\x01\x82\xd3\xe4\x93\x02\xb0\x01"L/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\x08templateZV"J/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\x08template\xda\x41\x10parent, 
template\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xba\x02\n\x16UpdateWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.UpdateWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\xb0\x01\x82\xd3\xe4\x93\x02\x9e\x01\x1a\x41/v1beta2/{template.name=projects/*/regions/*/workflowTemplates/*}:\x08templateZO\x1a\x43/v1beta2/{template.name=projects/*/locations/*/workflowTemplates/*}:\x08template\xda\x41\x08template\x12\x9c\x02\n\x15ListWorkflowTemplates\x12;.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest\x1a<.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse"\x87\x01\x82\xd3\xe4\x93\x02x\x12\x38/v1beta2/{parent=projects/*/regions/*}/workflowTemplatesZ<\x12:/v1beta2/{parent=projects/*/locations/*}/workflowTemplates\xda\x41\x06parent\x12\xf6\x01\n\x16\x44\x65leteWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest\x1a\x16.google.protobuf.Empty"\x85\x01\x82\xd3\xe4\x93\x02x*8/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<*:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\xda\x41\x04name\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x84\x01\n!com.google.cloud.dataproc.v1beta2B\x16WorkflowTemplatesProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.DESCRIPTOR, - google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.DESCRIPTOR, - google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - -_WORKFLOWMETADATA_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.State", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="UNKNOWN", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PENDING", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DONE", index=3, number=3, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3421, - serialized_end=3477, -) -_sym_db.RegisterEnumDescriptor(_WORKFLOWMETADATA_STATE) - -_WORKFLOWNODE_NODESTATE = _descriptor.EnumDescriptor( - name="NodeState", - full_name="google.cloud.dataproc.v1beta2.WorkflowNode.NodeState", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="NODE_STATUS_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="BLOCKED", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RUNNABLE", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="COMPLETED", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FAILED", index=5, 
number=5, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3838, - serialized_end=3945, -) -_sym_db.RegisterEnumDescriptor(_WORKFLOWNODE_NODESTATE) - - -_WORKFLOWTEMPLATE_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplate.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplate.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplate.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=887, - serialized_end=932, -) - -_WORKFLOWTEMPLATE = _descriptor.Descriptor( - name="WorkflowTemplate", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplate", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="id", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplate.id", - index=0, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplate.name", - index=1, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplate.version", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplate.create_time", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplate.update_time", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplate.labels", - index=5, - number=6, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="placement", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplate.placement", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="jobs", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplate.jobs", - index=7, - number=8, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="parameters", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplate.parameters", - index=8, - number=9, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_WORKFLOWTEMPLATE_LABELSENTRY], - enum_types=[], - serialized_options=_b( - "\352A\306\001\n(dataproc.googleapis.com/WorkflowTemplate\022Iprojects/{project}/regions/{region}/workflowTemplates/{workflow_template}\022Mprojects/{project}/locations/{location}/workflowTemplates/{workflow_template} \001" - ), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=410, - serialized_end=1137, -) - - -_WORKFLOWTEMPLATEPLACEMENT = _descriptor.Descriptor( - name="WorkflowTemplatePlacement", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplatePlacement", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="managed_cluster", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplatePlacement.managed_cluster", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_selector", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplatePlacement.cluster_selector", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="placement", - 
full_name="google.cloud.dataproc.v1beta2.WorkflowTemplatePlacement.placement", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=1140, - serialized_end=1330, -) - - -_MANAGEDCLUSTER_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.cloud.dataproc.v1beta2.ManagedCluster.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.ManagedCluster.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.ManagedCluster.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=887, - serialized_end=932, -) - -_MANAGEDCLUSTER = _descriptor.Descriptor( - name="ManagedCluster", - full_name="google.cloud.dataproc.v1beta2.ManagedCluster", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1beta2.ManagedCluster.cluster_name", - index=0, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="config", - full_name="google.cloud.dataproc.v1beta2.ManagedCluster.config", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.cloud.dataproc.v1beta2.ManagedCluster.labels", - index=2, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_MANAGEDCLUSTER_LABELSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1333, - serialized_end=1555, -) - - -_CLUSTERSELECTOR_CLUSTERLABELSENTRY = _descriptor.Descriptor( - name="ClusterLabelsEntry", - full_name="google.cloud.dataproc.v1beta2.ClusterSelector.ClusterLabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.ClusterSelector.ClusterLabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.ClusterSelector.ClusterLabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1682, - serialized_end=1734, -) - -_CLUSTERSELECTOR = _descriptor.Descriptor( - name="ClusterSelector", - full_name="google.cloud.dataproc.v1beta2.ClusterSelector", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="zone", - full_name="google.cloud.dataproc.v1beta2.ClusterSelector.zone", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_labels", - full_name="google.cloud.dataproc.v1beta2.ClusterSelector.cluster_labels", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_CLUSTERSELECTOR_CLUSTERLABELSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1558, - serialized_end=1734, -) - - -_ORDEREDJOB_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.cloud.dataproc.v1beta2.OrderedJob.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.OrderedJob.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.OrderedJob.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=887, - serialized_end=932, -) - -_ORDEREDJOB = _descriptor.Descriptor( - name="OrderedJob", - full_name="google.cloud.dataproc.v1beta2.OrderedJob", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - 
name="step_id", - full_name="google.cloud.dataproc.v1beta2.OrderedJob.step_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="hadoop_job", - full_name="google.cloud.dataproc.v1beta2.OrderedJob.hadoop_job", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="spark_job", - full_name="google.cloud.dataproc.v1beta2.OrderedJob.spark_job", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="pyspark_job", - full_name="google.cloud.dataproc.v1beta2.OrderedJob.pyspark_job", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="hive_job", - full_name="google.cloud.dataproc.v1beta2.OrderedJob.hive_job", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="pig_job", - full_name="google.cloud.dataproc.v1beta2.OrderedJob.pig_job", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="spark_sql_job", - full_name="google.cloud.dataproc.v1beta2.OrderedJob.spark_sql_job", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.cloud.dataproc.v1beta2.OrderedJob.labels", - index=7, - number=8, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="scheduling", - full_name="google.cloud.dataproc.v1beta2.OrderedJob.scheduling", - index=8, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="prerequisite_step_ids", - 
full_name="google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids", - index=9, - number=10, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_ORDEREDJOB_LABELSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="job_type", - full_name="google.cloud.dataproc.v1beta2.OrderedJob.job_type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=1737, - serialized_end=2372, -) - - -_TEMPLATEPARAMETER = _descriptor.Descriptor( - name="TemplateParameter", - full_name="google.cloud.dataproc.v1beta2.TemplateParameter", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.dataproc.v1beta2.TemplateParameter.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fields", - full_name="google.cloud.dataproc.v1beta2.TemplateParameter.fields", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.dataproc.v1beta2.TemplateParameter.description", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="validation", - full_name="google.cloud.dataproc.v1beta2.TemplateParameter.validation", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2375, - serialized_end=2517, -) - - -_PARAMETERVALIDATION = _descriptor.Descriptor( - name="ParameterValidation", - full_name="google.cloud.dataproc.v1beta2.ParameterValidation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="regex", - full_name="google.cloud.dataproc.v1beta2.ParameterValidation.regex", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="values", - full_name="google.cloud.dataproc.v1beta2.ParameterValidation.values", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - 
has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="validation_type", - full_name="google.cloud.dataproc.v1beta2.ParameterValidation.validation_type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=2520, - serialized_end=2691, -) - - -_REGEXVALIDATION = _descriptor.Descriptor( - name="RegexValidation", - full_name="google.cloud.dataproc.v1beta2.RegexValidation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="regexes", - full_name="google.cloud.dataproc.v1beta2.RegexValidation.regexes", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2693, - serialized_end=2727, -) - - -_VALUEVALIDATION = _descriptor.Descriptor( - name="ValueValidation", - full_name="google.cloud.dataproc.v1beta2.ValueValidation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="values", - full_name="google.cloud.dataproc.v1beta2.ValueValidation.values", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2729, - serialized_end=2762, -) - - -_WORKFLOWMETADATA_PARAMETERSENTRY = _descriptor.Descriptor( - name="ParametersEntry", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.ParametersEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.ParametersEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.ParametersEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3370, - serialized_end=3419, -) - -_WORKFLOWMETADATA = _descriptor.Descriptor( - 
name="WorkflowMetadata", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="template", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.template", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.version", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_cluster", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.create_cluster", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="graph", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.graph", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="delete_cluster", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.delete_cluster", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.state", - index=5, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.cluster_name", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="parameters", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.parameters", - index=7, - number=8, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.start_time", - index=8, - number=9, - 
type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.end_time", - index=9, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_uuid", - full_name="google.cloud.dataproc.v1beta2.WorkflowMetadata.cluster_uuid", - index=10, - number=11, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_WORKFLOWMETADATA_PARAMETERSENTRY], - enum_types=[_WORKFLOWMETADATA_STATE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2765, - serialized_end=3477, -) - - -_CLUSTEROPERATION = _descriptor.Descriptor( - name="ClusterOperation", - full_name="google.cloud.dataproc.v1beta2.ClusterOperation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="operation_id", - full_name="google.cloud.dataproc.v1beta2.ClusterOperation.operation_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="error", - full_name="google.cloud.dataproc.v1beta2.ClusterOperation.error", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="done", - full_name="google.cloud.dataproc.v1beta2.ClusterOperation.done", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3479, - serialized_end=3563, -) - - -_WORKFLOWGRAPH = _descriptor.Descriptor( - name="WorkflowGraph", - full_name="google.cloud.dataproc.v1beta2.WorkflowGraph", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="nodes", - full_name="google.cloud.dataproc.v1beta2.WorkflowGraph.nodes", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3565, - serialized_end=3645, -) - - -_WORKFLOWNODE = _descriptor.Descriptor( - name="WorkflowNode", - full_name="google.cloud.dataproc.v1beta2.WorkflowNode", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="step_id", - full_name="google.cloud.dataproc.v1beta2.WorkflowNode.step_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="prerequisite_step_ids", - full_name="google.cloud.dataproc.v1beta2.WorkflowNode.prerequisite_step_ids", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_id", - full_name="google.cloud.dataproc.v1beta2.WorkflowNode.job_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.dataproc.v1beta2.WorkflowNode.state", - index=3, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="error", - full_name="google.cloud.dataproc.v1beta2.WorkflowNode.error", - index=4, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_WORKFLOWNODE_NODESTATE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3648, - serialized_end=3945, -) - - -_CREATEWORKFLOWTEMPLATEREQUEST = _descriptor.Descriptor( - name="CreateWorkflowTemplateRequest", - full_name="google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A*\022(dataproc.googleapis.com/WorkflowTemplate" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="template", - 
full_name="google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest.template", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3948, - serialized_end=4117, -) - - -_GETWORKFLOWTEMPLATEREQUEST = _descriptor.Descriptor( - name="GetWorkflowTemplateRequest", - full_name="google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A*\n(dataproc.googleapis.com/WorkflowTemplate" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest.version", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4119, - serialized_end=4228, -) - - -_INSTANTIATEWORKFLOWTEMPLATEREQUEST_PARAMETERSENTRY = _descriptor.Descriptor( - name="ParametersEntry", - full_name="google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.ParametersEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.ParametersEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.ParametersEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3370, - serialized_end=3419, -) - -_INSTANTIATEWORKFLOWTEMPLATEREQUEST = _descriptor.Descriptor( - name="InstantiateWorkflowTemplateRequest", - full_name="google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - 
name="name", - full_name="google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A*\n(dataproc.googleapis.com/WorkflowTemplate" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.version", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="instance_id", - full_name="google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.instance_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="request_id", - full_name="google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.request_id", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="parameters", - full_name="google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.parameters", - index=4, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_INSTANTIATEWORKFLOWTEMPLATEREQUEST_PARAMETERSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4231, - serialized_end=4547, -) - - -_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST = _descriptor.Descriptor( - name="InstantiateInlineWorkflowTemplateRequest", - full_name="google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A*\022(dataproc.googleapis.com/WorkflowTemplate" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="template", - full_name="google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest.template", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="instance_id", - full_name="google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest.instance_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="request_id", - full_name="google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest.request_id", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4550, - serialized_end=4771, -) - - -_UPDATEWORKFLOWTEMPLATEREQUEST = _descriptor.Descriptor( - name="UpdateWorkflowTemplateRequest", - full_name="google.cloud.dataproc.v1beta2.UpdateWorkflowTemplateRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="template", - full_name="google.cloud.dataproc.v1beta2.UpdateWorkflowTemplateRequest.template", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4773, - serialized_end=4876, -) - - -_LISTWORKFLOWTEMPLATESREQUEST = _descriptor.Descriptor( - name="ListWorkflowTemplatesRequest", - full_name="google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - 
enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4878, - serialized_end=4968, -) - - -_LISTWORKFLOWTEMPLATESRESPONSE = _descriptor.Descriptor( - name="ListWorkflowTemplatesResponse", - full_name="google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="templates", - full_name="google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse.templates", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4971, - serialized_end=5105, -) - - -_DELETEWORKFLOWTEMPLATEREQUEST = _descriptor.Descriptor( - name="DeleteWorkflowTemplateRequest", - full_name="google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A*\n(dataproc.googleapis.com/WorkflowTemplate" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest.version", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5107, - serialized_end=5219, -) - -_WORKFLOWTEMPLATE_LABELSENTRY.containing_type = _WORKFLOWTEMPLATE -_WORKFLOWTEMPLATE.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_WORKFLOWTEMPLATE.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_WORKFLOWTEMPLATE.fields_by_name["labels"].message_type = _WORKFLOWTEMPLATE_LABELSENTRY -_WORKFLOWTEMPLATE.fields_by_name["placement"].message_type = _WORKFLOWTEMPLATEPLACEMENT -_WORKFLOWTEMPLATE.fields_by_name["jobs"].message_type = _ORDEREDJOB -_WORKFLOWTEMPLATE.fields_by_name["parameters"].message_type = _TEMPLATEPARAMETER -_WORKFLOWTEMPLATEPLACEMENT.fields_by_name[ - "managed_cluster" -].message_type = 
_MANAGEDCLUSTER -_WORKFLOWTEMPLATEPLACEMENT.fields_by_name[ - "cluster_selector" -].message_type = _CLUSTERSELECTOR -_WORKFLOWTEMPLATEPLACEMENT.oneofs_by_name["placement"].fields.append( - _WORKFLOWTEMPLATEPLACEMENT.fields_by_name["managed_cluster"] -) -_WORKFLOWTEMPLATEPLACEMENT.fields_by_name[ - "managed_cluster" -].containing_oneof = _WORKFLOWTEMPLATEPLACEMENT.oneofs_by_name["placement"] -_WORKFLOWTEMPLATEPLACEMENT.oneofs_by_name["placement"].fields.append( - _WORKFLOWTEMPLATEPLACEMENT.fields_by_name["cluster_selector"] -) -_WORKFLOWTEMPLATEPLACEMENT.fields_by_name[ - "cluster_selector" -].containing_oneof = _WORKFLOWTEMPLATEPLACEMENT.oneofs_by_name["placement"] -_MANAGEDCLUSTER_LABELSENTRY.containing_type = _MANAGEDCLUSTER -_MANAGEDCLUSTER.fields_by_name[ - "config" -].message_type = ( - google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2._CLUSTERCONFIG -) -_MANAGEDCLUSTER.fields_by_name["labels"].message_type = _MANAGEDCLUSTER_LABELSENTRY -_CLUSTERSELECTOR_CLUSTERLABELSENTRY.containing_type = _CLUSTERSELECTOR -_CLUSTERSELECTOR.fields_by_name[ - "cluster_labels" -].message_type = _CLUSTERSELECTOR_CLUSTERLABELSENTRY -_ORDEREDJOB_LABELSENTRY.containing_type = _ORDEREDJOB -_ORDEREDJOB.fields_by_name[ - "hadoop_job" -].message_type = ( - google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2._HADOOPJOB -) -_ORDEREDJOB.fields_by_name[ - "spark_job" -].message_type = ( - google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2._SPARKJOB -) -_ORDEREDJOB.fields_by_name[ - "pyspark_job" -].message_type = ( - google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2._PYSPARKJOB -) -_ORDEREDJOB.fields_by_name[ - "hive_job" -].message_type = google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2._HIVEJOB -_ORDEREDJOB.fields_by_name[ - "pig_job" -].message_type = google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2._PIGJOB -_ORDEREDJOB.fields_by_name[ - "spark_sql_job" -].message_type = ( - google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2._SPARKSQLJOB -) -_ORDEREDJOB.fields_by_name["labels"].message_type = _ORDEREDJOB_LABELSENTRY -_ORDEREDJOB.fields_by_name[ - "scheduling" -].message_type = ( - google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2._JOBSCHEDULING -) -_ORDEREDJOB.oneofs_by_name["job_type"].fields.append( - _ORDEREDJOB.fields_by_name["hadoop_job"] -) -_ORDEREDJOB.fields_by_name["hadoop_job"].containing_oneof = _ORDEREDJOB.oneofs_by_name[ - "job_type" -] -_ORDEREDJOB.oneofs_by_name["job_type"].fields.append( - _ORDEREDJOB.fields_by_name["spark_job"] -) -_ORDEREDJOB.fields_by_name["spark_job"].containing_oneof = _ORDEREDJOB.oneofs_by_name[ - "job_type" -] -_ORDEREDJOB.oneofs_by_name["job_type"].fields.append( - _ORDEREDJOB.fields_by_name["pyspark_job"] -) -_ORDEREDJOB.fields_by_name["pyspark_job"].containing_oneof = _ORDEREDJOB.oneofs_by_name[ - "job_type" -] -_ORDEREDJOB.oneofs_by_name["job_type"].fields.append( - _ORDEREDJOB.fields_by_name["hive_job"] -) -_ORDEREDJOB.fields_by_name["hive_job"].containing_oneof = _ORDEREDJOB.oneofs_by_name[ - "job_type" -] -_ORDEREDJOB.oneofs_by_name["job_type"].fields.append( - _ORDEREDJOB.fields_by_name["pig_job"] -) -_ORDEREDJOB.fields_by_name["pig_job"].containing_oneof = _ORDEREDJOB.oneofs_by_name[ - "job_type" -] -_ORDEREDJOB.oneofs_by_name["job_type"].fields.append( - _ORDEREDJOB.fields_by_name["spark_sql_job"] -) -_ORDEREDJOB.fields_by_name[ - "spark_sql_job" -].containing_oneof = _ORDEREDJOB.oneofs_by_name["job_type"] 
-_TEMPLATEPARAMETER.fields_by_name["validation"].message_type = _PARAMETERVALIDATION -_PARAMETERVALIDATION.fields_by_name["regex"].message_type = _REGEXVALIDATION -_PARAMETERVALIDATION.fields_by_name["values"].message_type = _VALUEVALIDATION -_PARAMETERVALIDATION.oneofs_by_name["validation_type"].fields.append( - _PARAMETERVALIDATION.fields_by_name["regex"] -) -_PARAMETERVALIDATION.fields_by_name[ - "regex" -].containing_oneof = _PARAMETERVALIDATION.oneofs_by_name["validation_type"] -_PARAMETERVALIDATION.oneofs_by_name["validation_type"].fields.append( - _PARAMETERVALIDATION.fields_by_name["values"] -) -_PARAMETERVALIDATION.fields_by_name[ - "values" -].containing_oneof = _PARAMETERVALIDATION.oneofs_by_name["validation_type"] -_WORKFLOWMETADATA_PARAMETERSENTRY.containing_type = _WORKFLOWMETADATA -_WORKFLOWMETADATA.fields_by_name["create_cluster"].message_type = _CLUSTEROPERATION -_WORKFLOWMETADATA.fields_by_name["graph"].message_type = _WORKFLOWGRAPH -_WORKFLOWMETADATA.fields_by_name["delete_cluster"].message_type = _CLUSTEROPERATION -_WORKFLOWMETADATA.fields_by_name["state"].enum_type = _WORKFLOWMETADATA_STATE -_WORKFLOWMETADATA.fields_by_name[ - "parameters" -].message_type = _WORKFLOWMETADATA_PARAMETERSENTRY -_WORKFLOWMETADATA.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_WORKFLOWMETADATA.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_WORKFLOWMETADATA_STATE.containing_type = _WORKFLOWMETADATA -_WORKFLOWGRAPH.fields_by_name["nodes"].message_type = _WORKFLOWNODE -_WORKFLOWNODE.fields_by_name["state"].enum_type = _WORKFLOWNODE_NODESTATE -_WORKFLOWNODE_NODESTATE.containing_type = _WORKFLOWNODE -_CREATEWORKFLOWTEMPLATEREQUEST.fields_by_name[ - "template" -].message_type = _WORKFLOWTEMPLATE -_INSTANTIATEWORKFLOWTEMPLATEREQUEST_PARAMETERSENTRY.containing_type = ( - _INSTANTIATEWORKFLOWTEMPLATEREQUEST -) -_INSTANTIATEWORKFLOWTEMPLATEREQUEST.fields_by_name[ - "parameters" -].message_type = _INSTANTIATEWORKFLOWTEMPLATEREQUEST_PARAMETERSENTRY -_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST.fields_by_name[ - "template" -].message_type = _WORKFLOWTEMPLATE -_UPDATEWORKFLOWTEMPLATEREQUEST.fields_by_name[ - "template" -].message_type = _WORKFLOWTEMPLATE -_LISTWORKFLOWTEMPLATESRESPONSE.fields_by_name[ - "templates" -].message_type = _WORKFLOWTEMPLATE -DESCRIPTOR.message_types_by_name["WorkflowTemplate"] = _WORKFLOWTEMPLATE -DESCRIPTOR.message_types_by_name[ - "WorkflowTemplatePlacement" -] = _WORKFLOWTEMPLATEPLACEMENT -DESCRIPTOR.message_types_by_name["ManagedCluster"] = _MANAGEDCLUSTER -DESCRIPTOR.message_types_by_name["ClusterSelector"] = _CLUSTERSELECTOR -DESCRIPTOR.message_types_by_name["OrderedJob"] = _ORDEREDJOB -DESCRIPTOR.message_types_by_name["TemplateParameter"] = _TEMPLATEPARAMETER -DESCRIPTOR.message_types_by_name["ParameterValidation"] = _PARAMETERVALIDATION -DESCRIPTOR.message_types_by_name["RegexValidation"] = _REGEXVALIDATION -DESCRIPTOR.message_types_by_name["ValueValidation"] = _VALUEVALIDATION -DESCRIPTOR.message_types_by_name["WorkflowMetadata"] = _WORKFLOWMETADATA -DESCRIPTOR.message_types_by_name["ClusterOperation"] = _CLUSTEROPERATION -DESCRIPTOR.message_types_by_name["WorkflowGraph"] = _WORKFLOWGRAPH -DESCRIPTOR.message_types_by_name["WorkflowNode"] = _WORKFLOWNODE -DESCRIPTOR.message_types_by_name[ - "CreateWorkflowTemplateRequest" -] = _CREATEWORKFLOWTEMPLATEREQUEST -DESCRIPTOR.message_types_by_name[ - "GetWorkflowTemplateRequest" -] = _GETWORKFLOWTEMPLATEREQUEST 
-DESCRIPTOR.message_types_by_name[ - "InstantiateWorkflowTemplateRequest" -] = _INSTANTIATEWORKFLOWTEMPLATEREQUEST -DESCRIPTOR.message_types_by_name[ - "InstantiateInlineWorkflowTemplateRequest" -] = _INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST -DESCRIPTOR.message_types_by_name[ - "UpdateWorkflowTemplateRequest" -] = _UPDATEWORKFLOWTEMPLATEREQUEST -DESCRIPTOR.message_types_by_name[ - "ListWorkflowTemplatesRequest" -] = _LISTWORKFLOWTEMPLATESREQUEST -DESCRIPTOR.message_types_by_name[ - "ListWorkflowTemplatesResponse" -] = _LISTWORKFLOWTEMPLATESRESPONSE -DESCRIPTOR.message_types_by_name[ - "DeleteWorkflowTemplateRequest" -] = _DELETEWORKFLOWTEMPLATEREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -WorkflowTemplate = _reflection.GeneratedProtocolMessageType( - "WorkflowTemplate", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_WORKFLOWTEMPLATE_LABELSENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.WorkflowTemplate.LabelsEntry) - ), - ), - DESCRIPTOR=_WORKFLOWTEMPLATE, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A Cloud Dataproc workflow template resource. - - - Attributes: - id: - Required. The template id. The id must contain only letters - (a-z, A-Z), numbers (0-9), underscores (\_), and hyphens (-). - Cannot begin or end with underscore or hyphen. Must consist of - between 3 and 50 characters. . - name: - Output only. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource\_names. - For - ``projects.regions.workflowTemplates``, the resource name of - the template has the following format: ``projects/{proje - ct_id}/regions/{region}/workflowTemplates/{template_id}`` - - For ``projects.locations.workflowTemplates``, the resource - name of the template has the following format: ``project - s/{project_id}/locations/{location}/workflowTemplates/{templat - e_id}`` - version: - Optional. Used to perform a consistent read-modify-write. - This field should be left blank for a - ``CreateWorkflowTemplate`` request. It is required for an - ``UpdateWorkflowTemplate`` request, and must match the current - server version. A typical update template flow would fetch the - current template with a ``GetWorkflowTemplate`` request, which - will return the current template with the ``version`` field - filled in with the current server version. The user updates - other fields in the template, then returns it as part of the - ``UpdateWorkflowTemplate`` request. - create_time: - Output only. The time template was created. - update_time: - Output only. The time template was last updated. - labels: - Optional. The labels to associate with this template. These - labels will be propagated to all jobs and clusters created by - the workflow instance. Label **keys** must contain 1 to 63 - characters, and must conform to `RFC 1035 - `__. Label **values** - may be empty, but, if present, must contain 1 to 63 - characters, and must conform to `RFC 1035 - `__. No more than 32 - labels can be associated with a template. - placement: - Required. WorkflowTemplate scheduling information. - jobs: - Required. The Directed Acyclic Graph of Jobs to submit. - parameters: - Optional. Template parameters whose values are substituted - into the template. Values for parameters must be provided when - the template is instantiated. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.WorkflowTemplate) - ), -) -_sym_db.RegisterMessage(WorkflowTemplate) -_sym_db.RegisterMessage(WorkflowTemplate.LabelsEntry) - -WorkflowTemplatePlacement = _reflection.GeneratedProtocolMessageType( - "WorkflowTemplatePlacement", - (_message.Message,), - dict( - DESCRIPTOR=_WORKFLOWTEMPLATEPLACEMENT, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""Specifies workflow execution target. - - Either ``managed_cluster`` or ``cluster_selector`` is required. - - - Attributes: - placement: - Required. Specifies where workflow executes; either on a - managed cluster or an existing cluster chosen by labels. - managed_cluster: - Optional. A cluster that is managed by the workflow. - cluster_selector: - Optional. A selector that chooses target cluster for jobs - based on metadata. The selector is evaluated at the time each - job is submitted. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.WorkflowTemplatePlacement) - ), -) -_sym_db.RegisterMessage(WorkflowTemplatePlacement) - -ManagedCluster = _reflection.GeneratedProtocolMessageType( - "ManagedCluster", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_MANAGEDCLUSTER_LABELSENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ManagedCluster.LabelsEntry) - ), - ), - DESCRIPTOR=_MANAGEDCLUSTER, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""Cluster that is managed by the workflow. - - - Attributes: - cluster_name: - Required. The cluster name prefix. A unique cluster name will - be formed by appending a random suffix. The name must contain - only lower-case letters (a-z), numbers (0-9), and hyphens (-). - Must begin with a letter. Cannot begin or end with hyphen. - Must consist of between 2 and 35 characters. - config: - Required. The cluster configuration. - labels: - Optional. The labels to associate with this cluster. Label - keys must be between 1 and 63 characters long. Label values must be between - 1 and 63 characters long. No more than 32 - labels can be associated with a given cluster. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ManagedCluster) - ), -) -_sym_db.RegisterMessage(ManagedCluster) -_sym_db.RegisterMessage(ManagedCluster.LabelsEntry) - -ClusterSelector = _reflection.GeneratedProtocolMessageType( - "ClusterSelector", - (_message.Message,), - dict( - ClusterLabelsEntry=_reflection.GeneratedProtocolMessageType( - "ClusterLabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTERSELECTOR_CLUSTERLABELSENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ClusterSelector.ClusterLabelsEntry) - ), - ), - DESCRIPTOR=_CLUSTERSELECTOR, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A selector that chooses target cluster for jobs based on - metadata. - - - Attributes: - zone: - Optional. The zone where workflow process executes. This - parameter does not affect the selection of the cluster. If - unspecified, the zone of the first cluster matching the - selector is used. - cluster_labels: - Required. The cluster labels. Cluster must have all labels to - match. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ClusterSelector) - ), -) -_sym_db.RegisterMessage(ClusterSelector) -_sym_db.RegisterMessage(ClusterSelector.ClusterLabelsEntry) - -OrderedJob = _reflection.GeneratedProtocolMessageType( - "OrderedJob", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_ORDEREDJOB_LABELSENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.OrderedJob.LabelsEntry) - ), - ), - DESCRIPTOR=_ORDEREDJOB, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A job executed by the workflow. - - - Attributes: - step_id: - Required. The step id. The id must be unique among all jobs - within the template. The step id is used as prefix for job - id, as job ``goog-dataproc-workflow-step-id`` label, and in [p - rerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.p - rerequisite\_step\_ids] field from other steps. The id must - contain only letters (a-z, A-Z), numbers (0-9), underscores - (\_), and hyphens (-). Cannot begin or end with underscore or - hyphen. Must consist of between 3 and 50 characters. - job_type: - Required. The job definition. - hadoop_job: - Job is a Hadoop job. - spark_job: - Job is a Spark job. - pyspark_job: - Job is a Pyspark job. - hive_job: - Job is a Hive job. - pig_job: - Job is a Pig job. - spark_sql_job: - Job is a SparkSql job. - labels: - Optional. The labels to associate with this job. Label keys - must be between 1 and 63 characters long. Label values must be between - 1 and 63 characters long. No more than 32 labels can be - associated with a given job. - scheduling: - Optional. Job scheduling configuration. - prerequisite_step_ids: - Optional. The optional list of prerequisite job step\_ids. If - not specified, the job will start at the beginning of - workflow. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.OrderedJob) - ), -) -_sym_db.RegisterMessage(OrderedJob) -_sym_db.RegisterMessage(OrderedJob.LabelsEntry) - -TemplateParameter = _reflection.GeneratedProtocolMessageType( - "TemplateParameter", - (_message.Message,), - dict( - DESCRIPTOR=_TEMPLATEPARAMETER, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A configurable parameter that replaces one or more fields - in the template. Parameterizable fields: - Labels - File uris - Job - properties - Job arguments - Script variables - Main class (in HadoopJob - and SparkJob) - Zone (in ClusterSelector) - - - Attributes: - name: - Required. Parameter name. The parameter name is used as the - key, and paired with the parameter value, which are passed to - the template when the template is instantiated. The name must - contain only capital letters (A-Z), numbers (0-9), and - underscores (\_), and must not start with a number. The - maximum length is 40 characters. - fields: - Required. Paths to all fields that the parameter replaces. A - field is allowed to appear in at most one parameter's list of - field paths. A field path is similar in syntax to a - [google.protobuf.FieldMask][google.protobuf.FieldMask]. For - example, a field path that references the zone field of a - workflow template's cluster selector would be specified as - ``placement.clusterSelector.zone``. 
Also, field paths can - reference fields using the following syntax: - Values in - maps can be referenced by key: - labels['key'] - - placement.clusterSelector.clusterLabels['key'] - - placement.managedCluster.labels['key'] - - placement.clusterSelector.clusterLabels['key'] - - jobs['step-id'].labels['key'] - Jobs in the jobs list can be - referenced by step-id: - jobs['step- - id'].hadoopJob.mainJarFileUri - jobs['step- - id'].hiveJob.queryFileUri - jobs['step- - id'].pySparkJob.mainPythonFileUri - jobs['step- - id'].hadoopJob.jarFileUris[0] - jobs['step- - id'].hadoopJob.archiveUris[0] - jobs['step- - id'].hadoopJob.fileUris[0] - jobs['step- - id'].pySparkJob.pythonFileUris[0] - Items in repeated fields - can be referenced by a zero-based index: - jobs['step- - id'].sparkJob.args[0] - Other examples: - jobs['step- - id'].hadoopJob.properties['key'] - jobs['step- - id'].hadoopJob.args[0] - jobs['step- - id'].hiveJob.scriptVariables['key'] - jobs['step- - id'].hadoopJob.mainJarFileUri - - placement.clusterSelector.zone It may not be possible to - parameterize maps and repeated fields in their entirety since - only individual map values and individual items in repeated - fields can be referenced. For example, the following field - paths are invalid: - placement.clusterSelector.clusterLabels - - jobs['step-id'].sparkJob.args - description: - Optional. Brief description of the parameter. Must not exceed - 1024 characters. - validation: - Optional. Validation rules to be applied to this parameter's - value. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.TemplateParameter) - ), -) -_sym_db.RegisterMessage(TemplateParameter) - -ParameterValidation = _reflection.GeneratedProtocolMessageType( - "ParameterValidation", - (_message.Message,), - dict( - DESCRIPTOR=_PARAMETERVALIDATION, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""Configuration for parameter validation. - - - Attributes: - validation_type: - Required. The type of validation to be performed. - regex: - Validation based on regular expressions. - values: - Validation based on a list of allowed values. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ParameterValidation) - ), -) -_sym_db.RegisterMessage(ParameterValidation) - -RegexValidation = _reflection.GeneratedProtocolMessageType( - "RegexValidation", - (_message.Message,), - dict( - DESCRIPTOR=_REGEXVALIDATION, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""Validation based on regular expressions. - - - Attributes: - regexes: - Required. RE2 regular expressions used to validate the - parameter's value. The value must match the regex in its - entirety (substring matches are not sufficient). - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.RegexValidation) - ), -) -_sym_db.RegisterMessage(RegexValidation) - -ValueValidation = _reflection.GeneratedProtocolMessageType( - "ValueValidation", - (_message.Message,), - dict( - DESCRIPTOR=_VALUEVALIDATION, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""Validation based on a list of allowed values. - - - Attributes: - values: - Required. List of allowed values for the parameter. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ValueValidation) - ), -) -_sym_db.RegisterMessage(ValueValidation) - -WorkflowMetadata = _reflection.GeneratedProtocolMessageType( - "WorkflowMetadata", - (_message.Message,), - dict( - ParametersEntry=_reflection.GeneratedProtocolMessageType( - "ParametersEntry", - (_message.Message,), - dict( - DESCRIPTOR=_WORKFLOWMETADATA_PARAMETERSENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.WorkflowMetadata.ParametersEntry) - ), - ), - DESCRIPTOR=_WORKFLOWMETADATA, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A Cloud Dataproc workflow template resource. - - - Attributes: - template: - Output only. The resource name of the workflow template as - described in - https://cloud.google.com/apis/design/resource\_names. - For - ``projects.regions.workflowTemplates``, the resource name of - the template has the following format: ``projects/{proje - ct_id}/regions/{region}/workflowTemplates/{template_id}`` - - For ``projects.locations.workflowTemplates``, the resource - name of the template has the following format: ``project - s/{project_id}/locations/{location}/workflowTemplates/{templat - e_id}`` - version: - Output only. The version of template at the time of workflow - instantiation. - create_cluster: - Output only. The create cluster operation metadata. - graph: - Output only. The workflow graph. - delete_cluster: - Output only. The delete cluster operation metadata. - state: - Output only. The workflow state. - cluster_name: - Output only. The name of the target cluster. - parameters: - Map from parameter names to values that were used for those - parameters. - start_time: - Output only. Workflow start time. - end_time: - Output only. Workflow end time. - cluster_uuid: - Output only. The UUID of target cluster. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.WorkflowMetadata) - ), -) -_sym_db.RegisterMessage(WorkflowMetadata) -_sym_db.RegisterMessage(WorkflowMetadata.ParametersEntry) - -ClusterOperation = _reflection.GeneratedProtocolMessageType( - "ClusterOperation", - (_message.Message,), - dict( - DESCRIPTOR=_CLUSTEROPERATION, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""The cluster operation triggered by a workflow. - - - Attributes: - operation_id: - Output only. The id of the cluster operation. - error: - Output only. Error, if operation failed. - done: - Output only. Indicates the operation is done. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ClusterOperation) - ), -) -_sym_db.RegisterMessage(ClusterOperation) - -WorkflowGraph = _reflection.GeneratedProtocolMessageType( - "WorkflowGraph", - (_message.Message,), - dict( - DESCRIPTOR=_WORKFLOWGRAPH, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""The workflow graph. - - - Attributes: - nodes: - Output only. The workflow nodes. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.WorkflowGraph) - ), -) -_sym_db.RegisterMessage(WorkflowGraph) - -WorkflowNode = _reflection.GeneratedProtocolMessageType( - "WorkflowNode", - (_message.Message,), - dict( - DESCRIPTOR=_WORKFLOWNODE, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""The workflow node. - - - Attributes: - step_id: - Output only. The name of the node. 
- prerequisite_step_ids: - Output only. Node's prerequisite nodes. - job_id: - Output only. The job id; populated after the node enters - RUNNING state. - state: - Output only. The node state. - error: - Output only. The error detail. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.WorkflowNode) - ), -) -_sym_db.RegisterMessage(WorkflowNode) - -CreateWorkflowTemplateRequest = _reflection.GeneratedProtocolMessageType( - "CreateWorkflowTemplateRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATEWORKFLOWTEMPLATEREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A request to create a workflow template. - - - Attributes: - parent: - Required. The resource name of the region or location, as - described in - https://cloud.google.com/apis/design/resource\_names. - For - ``projects.regions.workflowTemplates,create``, the resource - name of the region has the following format: - ``projects/{project_id}/regions/{region}`` - For - ``projects.locations.workflowTemplates.create``, the resource - name of the location has the following format: - ``projects/{project_id}/locations/{location}`` - template: - Required. The Dataproc workflow template to create. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest) - ), -) -_sym_db.RegisterMessage(CreateWorkflowTemplateRequest) - -GetWorkflowTemplateRequest = _reflection.GeneratedProtocolMessageType( - "GetWorkflowTemplateRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETWORKFLOWTEMPLATEREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A request to fetch a workflow template. - - - Attributes: - name: - Required. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource\_names. - For - ``projects.regions.workflowTemplates.get``, the resource name - of the template has the following format: ``projects/{pr - oject_id}/regions/{region}/workflowTemplates/{template_id}`` - - For ``projects.locations.workflowTemplates.get``, the - resource name of the template has the following format: - ``projects/{project_id}/locations/{location}/workflowTemplates - /{template_id}`` - version: - Optional. The version of workflow template to retrieve. Only - previously instantiated versions can be retrieved. If - unspecified, retrieves the current version. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest) - ), -) -_sym_db.RegisterMessage(GetWorkflowTemplateRequest) - -InstantiateWorkflowTemplateRequest = _reflection.GeneratedProtocolMessageType( - "InstantiateWorkflowTemplateRequest", - (_message.Message,), - dict( - ParametersEntry=_reflection.GeneratedProtocolMessageType( - "ParametersEntry", - (_message.Message,), - dict( - DESCRIPTOR=_INSTANTIATEWORKFLOWTEMPLATEREQUEST_PARAMETERSENTRY, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.ParametersEntry) - ), - ), - DESCRIPTOR=_INSTANTIATEWORKFLOWTEMPLATEREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A request to instantiate a workflow template. - - - Attributes: - name: - Required. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource\_names. 
- For - ``projects.regions.workflowTemplates.instantiate``, the - resource name of the template has the following format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{te - mplate_id}`` - For - ``projects.locations.workflowTemplates.instantiate``, the - resource name of the template has the following format: ``p - rojects/{project_id}/locations/{location}/workflowTemplates/{t - emplate_id}`` - version: - Optional. The version of workflow template to instantiate. If - specified, the workflow will be instantiated only if the - current version of the workflow template has the supplied - version. This option cannot be used to instantiate a previous - version of workflow template. - instance_id: - Deprecated. Please use ``request_id`` field instead. - request_id: - Optional. A tag that prevents multiple concurrent workflow - instances with the same tag from running. This mitigates risk - of concurrent instances started due to retries. It is - recommended to always set this value to a `UUID `__. The tag - must contain only letters (a-z, A-Z), numbers (0-9), - underscores (\_), and hyphens (-). The maximum length is 40 - characters. - parameters: - Optional. Map from parameter names to values that should be - used for those parameters. Values may not exceed 100 - characters. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest) - ), -) -_sym_db.RegisterMessage(InstantiateWorkflowTemplateRequest) -_sym_db.RegisterMessage(InstantiateWorkflowTemplateRequest.ParametersEntry) - -InstantiateInlineWorkflowTemplateRequest = _reflection.GeneratedProtocolMessageType( - "InstantiateInlineWorkflowTemplateRequest", - (_message.Message,), - dict( - DESCRIPTOR=_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A request to instantiate an inline workflow template. - - - Attributes: - parent: - Required. The resource name of the region or location, as - described in - https://cloud.google.com/apis/design/resource\_names. - For - ``projects.regions.workflowTemplates,instantiateinline``, the - resource name of the region has the following format: - ``projects/{project_id}/regions/{region}`` - For - ``projects.locations.workflowTemplates.instantiateinline``, - the resource name of the location has the following format: - ``projects/{project_id}/locations/{location}`` - template: - Required. The workflow template to instantiate. - instance_id: - Deprecated. Please use ``request_id`` field instead. - request_id: - Optional. A tag that prevents multiple concurrent workflow - instances with the same tag from running. This mitigates risk - of concurrent instances started due to retries. It is - recommended to always set this value to a `UUID `__. The tag - must contain only letters (a-z, A-Z), numbers (0-9), - underscores (\_), and hyphens (-). The maximum length is 40 - characters. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest) - ), -) -_sym_db.RegisterMessage(InstantiateInlineWorkflowTemplateRequest) - -UpdateWorkflowTemplateRequest = _reflection.GeneratedProtocolMessageType( - "UpdateWorkflowTemplateRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATEWORKFLOWTEMPLATEREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A request to update a workflow template. - - - Attributes: - template: - Required. The updated workflow template. 
The - ``template.version`` field must match the current version. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.UpdateWorkflowTemplateRequest) - ), -) -_sym_db.RegisterMessage(UpdateWorkflowTemplateRequest) - -ListWorkflowTemplatesRequest = _reflection.GeneratedProtocolMessageType( - "ListWorkflowTemplatesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTWORKFLOWTEMPLATESREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A request to list workflow templates in a project. - - - Attributes: - parent: - Required. The resource name of the region or location, as - described in - https://cloud.google.com/apis/design/resource\_names. - For - ``projects.regions.workflowTemplates,list``, the resource name - of the region has the following format: - ``projects/{project_id}/regions/{region}`` - For - ``projects.locations.workflowTemplates.list``, the resource - name of the location has the following format: - ``projects/{project_id}/locations/{location}`` - page_size: - Optional. The maximum number of results to return in each - response. - page_token: - Optional. The page token, returned by a previous call, to - request the next page of results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest) - ), -) -_sym_db.RegisterMessage(ListWorkflowTemplatesRequest) - -ListWorkflowTemplatesResponse = _reflection.GeneratedProtocolMessageType( - "ListWorkflowTemplatesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTWORKFLOWTEMPLATESRESPONSE, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A response to a request to list workflow templates in a - project. - - - Attributes: - templates: - Output only. WorkflowTemplates list. - next_page_token: - Output only. This token is included in the response if there - are more results to fetch. To fetch additional results, - provide this value as the page\_token in a subsequent - ListWorkflowTemplatesRequest. - """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse) - ), -) -_sym_db.RegisterMessage(ListWorkflowTemplatesResponse) - -DeleteWorkflowTemplateRequest = _reflection.GeneratedProtocolMessageType( - "DeleteWorkflowTemplateRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEWORKFLOWTEMPLATEREQUEST, - __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A request to delete a workflow template. - - Currently started workflows will remain running. - - - Attributes: - name: - Required. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource\_names. - For - ``projects.regions.workflowTemplates.delete``, the resource - name of the template has the following format: ``project - s/{project_id}/regions/{region}/workflowTemplates/{template_id - }`` - For - ``projects.locations.workflowTemplates.instantiate``, the - resource name of the template has the following format: ``p - rojects/{project_id}/locations/{location}/workflowTemplates/{t - emplate_id}`` - version: - Optional. The version of workflow template to delete. If - specified, will only delete the template if the current server - version matches specified version. 
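Taken together, the request messages above map onto the methods of the v1beta2 WorkflowTemplateServiceClient that this package exposed. As a hedged sketch of typical usage (the project, region, template id, and parameter values are placeholders, and error handling is omitted):

from google.cloud import dataproc_v1beta2

client = dataproc_v1beta2.WorkflowTemplateServiceClient()
parent = "projects/my-project/regions/us-central1"
name = parent + "/workflowTemplates/my-template"

# Instantiation returns a long-running operation whose metadata is the
# WorkflowMetadata message documented earlier in this module.
operation = client.instantiate_workflow_template(
    name, parameters={"ZONE": "us-central1-a", "DATASET": "staging"}
)
operation.result()  # blocks until the entire workflow has finished

# Inspect the workflow graph and per-node state recorded in the metadata.
for node in operation.metadata.graph.nodes:
    print(node.step_id, node.state, node.job_id)

# Deleting the template does not cancel workflows that are already running.
client.delete_workflow_template(name)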
- """, - # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest) - ), -) -_sym_db.RegisterMessage(DeleteWorkflowTemplateRequest) - - -DESCRIPTOR._options = None -_WORKFLOWTEMPLATE_LABELSENTRY._options = None -_WORKFLOWTEMPLATE.fields_by_name["id"]._options = None -_WORKFLOWTEMPLATE.fields_by_name["name"]._options = None -_WORKFLOWTEMPLATE.fields_by_name["version"]._options = None -_WORKFLOWTEMPLATE.fields_by_name["create_time"]._options = None -_WORKFLOWTEMPLATE.fields_by_name["update_time"]._options = None -_WORKFLOWTEMPLATE.fields_by_name["labels"]._options = None -_WORKFLOWTEMPLATE.fields_by_name["parameters"]._options = None -_WORKFLOWTEMPLATE._options = None -_MANAGEDCLUSTER_LABELSENTRY._options = None -_CLUSTERSELECTOR_CLUSTERLABELSENTRY._options = None -_ORDEREDJOB_LABELSENTRY._options = None -_WORKFLOWMETADATA_PARAMETERSENTRY._options = None -_WORKFLOWMETADATA.fields_by_name["template"]._options = None -_WORKFLOWMETADATA.fields_by_name["version"]._options = None -_WORKFLOWMETADATA.fields_by_name["create_cluster"]._options = None -_WORKFLOWMETADATA.fields_by_name["graph"]._options = None -_WORKFLOWMETADATA.fields_by_name["delete_cluster"]._options = None -_WORKFLOWMETADATA.fields_by_name["state"]._options = None -_WORKFLOWMETADATA.fields_by_name["cluster_name"]._options = None -_WORKFLOWMETADATA.fields_by_name["start_time"]._options = None -_WORKFLOWMETADATA.fields_by_name["end_time"]._options = None -_WORKFLOWMETADATA.fields_by_name["cluster_uuid"]._options = None -_CLUSTEROPERATION.fields_by_name["operation_id"]._options = None -_CLUSTEROPERATION.fields_by_name["error"]._options = None -_CLUSTEROPERATION.fields_by_name["done"]._options = None -_WORKFLOWGRAPH.fields_by_name["nodes"]._options = None -_WORKFLOWNODE.fields_by_name["step_id"]._options = None -_WORKFLOWNODE.fields_by_name["prerequisite_step_ids"]._options = None -_WORKFLOWNODE.fields_by_name["job_id"]._options = None -_WORKFLOWNODE.fields_by_name["state"]._options = None -_WORKFLOWNODE.fields_by_name["error"]._options = None -_CREATEWORKFLOWTEMPLATEREQUEST.fields_by_name["parent"]._options = None -_CREATEWORKFLOWTEMPLATEREQUEST.fields_by_name["template"]._options = None -_GETWORKFLOWTEMPLATEREQUEST.fields_by_name["name"]._options = None -_INSTANTIATEWORKFLOWTEMPLATEREQUEST_PARAMETERSENTRY._options = None -_INSTANTIATEWORKFLOWTEMPLATEREQUEST.fields_by_name["name"]._options = None -_INSTANTIATEWORKFLOWTEMPLATEREQUEST.fields_by_name["instance_id"]._options = None -_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST.fields_by_name["parent"]._options = None -_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST.fields_by_name["template"]._options = None -_UPDATEWORKFLOWTEMPLATEREQUEST.fields_by_name["template"]._options = None -_LISTWORKFLOWTEMPLATESREQUEST.fields_by_name["parent"]._options = None -_LISTWORKFLOWTEMPLATESRESPONSE.fields_by_name["templates"]._options = None -_LISTWORKFLOWTEMPLATESRESPONSE.fields_by_name["next_page_token"]._options = None -_DELETEWORKFLOWTEMPLATEREQUEST.fields_by_name["name"]._options = None - -_WORKFLOWTEMPLATESERVICE = _descriptor.ServiceDescriptor( - name="WorkflowTemplateService", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplateService", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" - ), - serialized_start=5222, - serialized_end=7503, - methods=[ - _descriptor.MethodDescriptor( - name="CreateWorkflowTemplate", - 
full_name="google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate", - index=0, - containing_service=None, - input_type=_CREATEWORKFLOWTEMPLATEREQUEST, - output_type=_WORKFLOWTEMPLATE, - serialized_options=_b( - '\202\323\344\223\002\214\001"8/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:\010templateZF":/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:\010template\332A\020parent, template' - ), - ), - _descriptor.MethodDescriptor( - name="GetWorkflowTemplate", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplateService.GetWorkflowTemplate", - index=1, - containing_service=None, - input_type=_GETWORKFLOWTEMPLATEREQUEST, - output_type=_WORKFLOWTEMPLATE, - serialized_options=_b( - "\202\323\344\223\002x\0228/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<\022:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\332A\004name" - ), - ), - _descriptor.MethodDescriptor( - name="InstantiateWorkflowTemplate", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate", - index=2, - containing_service=None, - input_type=_INSTANTIATEWORKFLOWTEMPLATEREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - '\202\323\344\223\002\226\001"D/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\001*ZK"F/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\001*\332A\004name\332A\020name, parameters\312A)\n\025google.protobuf.Empty\022\020WorkflowMetadata' - ), - ), - _descriptor.MethodDescriptor( - name="InstantiateInlineWorkflowTemplate", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateInlineWorkflowTemplate", - index=3, - containing_service=None, - input_type=_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - '\202\323\344\223\002\260\001"L/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\010templateZV"J/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\010template\332A\020parent, template\312A)\n\025google.protobuf.Empty\022\020WorkflowMetadata' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateWorkflowTemplate", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplateService.UpdateWorkflowTemplate", - index=4, - containing_service=None, - input_type=_UPDATEWORKFLOWTEMPLATEREQUEST, - output_type=_WORKFLOWTEMPLATE, - serialized_options=_b( - "\202\323\344\223\002\236\001\032A/v1beta2/{template.name=projects/*/regions/*/workflowTemplates/*}:\010templateZO\032C/v1beta2/{template.name=projects/*/locations/*/workflowTemplates/*}:\010template\332A\010template" - ), - ), - _descriptor.MethodDescriptor( - name="ListWorkflowTemplates", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplateService.ListWorkflowTemplates", - index=5, - containing_service=None, - input_type=_LISTWORKFLOWTEMPLATESREQUEST, - output_type=_LISTWORKFLOWTEMPLATESRESPONSE, - serialized_options=_b( - "\202\323\344\223\002x\0228/v1beta2/{parent=projects/*/regions/*}/workflowTemplatesZ<\022:/v1beta2/{parent=projects/*/locations/*}/workflowTemplates\332A\006parent" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteWorkflowTemplate", - full_name="google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate", - index=6, - containing_service=None, - input_type=_DELETEWORKFLOWTEMPLATEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - 
serialized_options=_b( - "\202\323\344\223\002x*8/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<*:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\332A\004name" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_WORKFLOWTEMPLATESERVICE) - -DESCRIPTOR.services_by_name["WorkflowTemplateService"] = _WORKFLOWTEMPLATESERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py deleted file mode 100644 index e05372f50416..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py +++ /dev/null @@ -1,203 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.dataproc_v1beta2.proto import ( - workflow_templates_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class WorkflowTemplateServiceStub(object): - """The API interface for managing Workflow Templates in the - Cloud Dataproc API. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.CreateWorkflowTemplate = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/CreateWorkflowTemplate", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.CreateWorkflowTemplateRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.WorkflowTemplate.FromString, - ) - self.GetWorkflowTemplate = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/GetWorkflowTemplate", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.GetWorkflowTemplateRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.WorkflowTemplate.FromString, - ) - self.InstantiateWorkflowTemplate = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/InstantiateWorkflowTemplate", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.InstantiateWorkflowTemplateRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.InstantiateInlineWorkflowTemplate = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/InstantiateInlineWorkflowTemplate", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.InstantiateInlineWorkflowTemplateRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.UpdateWorkflowTemplate = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/UpdateWorkflowTemplate", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.UpdateWorkflowTemplateRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.WorkflowTemplate.FromString, - ) - self.ListWorkflowTemplates = channel.unary_unary( - 
"/google.cloud.dataproc.v1beta2.WorkflowTemplateService/ListWorkflowTemplates", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.ListWorkflowTemplatesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.ListWorkflowTemplatesResponse.FromString, - ) - self.DeleteWorkflowTemplate = channel.unary_unary( - "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/DeleteWorkflowTemplate", - request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.DeleteWorkflowTemplateRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - -class WorkflowTemplateServiceServicer(object): - """The API interface for managing Workflow Templates in the - Cloud Dataproc API. - """ - - def CreateWorkflowTemplate(self, request, context): - """Creates new workflow template. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetWorkflowTemplate(self, request, context): - """Retrieves the latest workflow template. - - Can retrieve previously instantiated template by specifying optional - version parameter. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def InstantiateWorkflowTemplate(self, request, context): - """Instantiates a template and begins execution. - - The returned Operation can be used to track execution of - workflow by polling - [operations.get][google.longrunning.Operations.GetOperation]. - The Operation will complete when entire workflow is finished. - - The running workflow can be aborted via - [operations.cancel][google.longrunning.Operations.CancelOperation]. - This will cause any inflight jobs to be cancelled and workflow-owned - clusters to be deleted. - - The [Operation.metadata][google.longrunning.Operation.metadata] will be - [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#workflowmetadata). - Also see [Using - WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). - - On successful completion, - [Operation.response][google.longrunning.Operation.response] will be - [Empty][google.protobuf.Empty]. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def InstantiateInlineWorkflowTemplate(self, request, context): - """Instantiates a template and begins execution. - - This method is equivalent to executing the sequence - [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate], - [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate]. - - The returned Operation can be used to track execution of - workflow by polling - [operations.get][google.longrunning.Operations.GetOperation]. - The Operation will complete when entire workflow is finished. - - The running workflow can be aborted via - [operations.cancel][google.longrunning.Operations.CancelOperation]. - This will cause any inflight jobs to be cancelled and workflow-owned - clusters to be deleted. 
- - The [Operation.metadata][google.longrunning.Operation.metadata] will be - [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). - Also see [Using - WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). - - On successful completion, - [Operation.response][google.longrunning.Operation.response] will be - [Empty][google.protobuf.Empty]. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateWorkflowTemplate(self, request, context): - """Updates (replaces) workflow template. The updated template - must contain version that matches the current server version. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListWorkflowTemplates(self, request, context): - """Lists workflows that match the specified filter in the request. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteWorkflowTemplate(self, request, context): - """Deletes a workflow template. It does not cancel in-progress workflows. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_WorkflowTemplateServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "CreateWorkflowTemplate": grpc.unary_unary_rpc_method_handler( - servicer.CreateWorkflowTemplate, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.CreateWorkflowTemplateRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.WorkflowTemplate.SerializeToString, - ), - "GetWorkflowTemplate": grpc.unary_unary_rpc_method_handler( - servicer.GetWorkflowTemplate, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.GetWorkflowTemplateRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.WorkflowTemplate.SerializeToString, - ), - "InstantiateWorkflowTemplate": grpc.unary_unary_rpc_method_handler( - servicer.InstantiateWorkflowTemplate, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.InstantiateWorkflowTemplateRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "InstantiateInlineWorkflowTemplate": grpc.unary_unary_rpc_method_handler( - servicer.InstantiateInlineWorkflowTemplate, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.InstantiateInlineWorkflowTemplateRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "UpdateWorkflowTemplate": grpc.unary_unary_rpc_method_handler( - servicer.UpdateWorkflowTemplate, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.UpdateWorkflowTemplateRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.WorkflowTemplate.SerializeToString, - ), - "ListWorkflowTemplates": grpc.unary_unary_rpc_method_handler( - 
servicer.ListWorkflowTemplates, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.ListWorkflowTemplatesRequest.FromString, - response_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.ListWorkflowTemplatesResponse.SerializeToString, - ), - "DeleteWorkflowTemplate": grpc.unary_unary_rpc_method_handler( - servicer.DeleteWorkflowTemplate, - request_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.DeleteWorkflowTemplateRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.dataproc.v1beta2.WorkflowTemplateService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/dataproc/google/cloud/dataproc_v1beta2/types.py b/dataproc/google/cloud/dataproc_v1beta2/types.py deleted file mode 100644 index 23d3f87ebab8..000000000000 --- a/dataproc/google/cloud/dataproc_v1beta2/types.py +++ /dev/null @@ -1,68 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import -import sys - -from google.api_core.protobuf_helpers import get_messages - -from google.cloud.dataproc_v1beta2.proto import autoscaling_policies_pb2 -from google.cloud.dataproc_v1beta2.proto import clusters_pb2 -from google.cloud.dataproc_v1beta2.proto import jobs_pb2 -from google.cloud.dataproc_v1beta2.proto import operations_pb2 as proto_operations_pb2 -from google.cloud.dataproc_v1beta2.proto import workflow_templates_pb2 -from google.longrunning import operations_pb2 as longrunning_operations_pb2 -from google.protobuf import any_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 -from google.rpc import status_pb2 - - -_shared_modules = [ - longrunning_operations_pb2, - any_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, - status_pb2, -] - -_local_modules = [ - autoscaling_policies_pb2, - clusters_pb2, - jobs_pb2, - proto_operations_pb2, - workflow_templates_pb2, -] - -names = [] - -for module in _shared_modules: # pragma: NO COVER - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.dataproc_v1beta2.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - - -__all__ = tuple(sorted(names)) diff --git a/dataproc/noxfile.py b/dataproc/noxfile.py deleted file mode 100644 index 7949a4e3925a..000000000000 --- a/dataproc/noxfile.py +++ /dev/null @@ -1,160 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this 
file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! - -from __future__ import absolute_import -import os -import shutil - -import nox - - -LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) -BLACK_VERSION = "black==19.3b0" -BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -if os.path.exists("samples"): - BLACK_PATHS.append("samples") - - -@nox.session(python="3.7") -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION, *LOCAL_DEPS) - session.run("black", "--check", *BLACK_PATHS) - session.run("flake8", "google", "tests") - - -@nox.session(python="3.6") -def blacken(session): - """Run black. - - Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. - """ - session.install(BLACK_VERSION) - session.run("black", *BLACK_PATHS) - - -@nox.session(python="3.7") -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def default(session): - # Install all test dependencies, then install this package in-place. - session.install("mock", "pytest", "pytest-cov") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", ".") - - # Run py.test against the unit tests. - session.run( - "py.test", - "--quiet", - "--cov=google.cloud", - "--cov=tests.unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - ) - - -@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"]) -def unit(session): - """Run the unit test suite.""" - default(session) - - -@nox.session(python=["2.7", "3.7"]) -def system(session): - """Run the system test suite.""" - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. 
- session.install("mock", "pytest") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", "../test_utils/") - session.install("-e", ".") - - # Run py.test against the system tests. - if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) - if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) - - -@nox.session(python="3.7") -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python="3.7") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/dataproc/setup.cfg b/dataproc/setup.cfg deleted file mode 100644 index 3bd555500e37..000000000000 --- a/dataproc/setup.cfg +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[bdist_wheel] -universal = 1 diff --git a/dataproc/setup.py b/dataproc/setup.py deleted file mode 100644 index 6d69fd1b5d36..000000000000 --- a/dataproc/setup.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import io -import os - -import setuptools - - -# Package metadata. - -name = "google-cloud-dataproc" -description = "Google Cloud Dataproc API client library" -version = "0.6.1" -# Should be one of: -# 'Development Status :: 3 - Alpha' -# 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Production/Stable' -release_status = "Development Status :: 3 - Alpha" -dependencies = ["google-api-core[grpc] >= 1.14.0, < 2.0.0dev"] -extras = {} - - -# Setup boilerplate below this line. - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -# Only include packages under the 'google' namespace. Do not include tests, -# benchmarks, etc. -packages = [ - package for package in setuptools.find_packages() if package.startswith("google") -] - -# Determine which namespaces are needed. 
-namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url="https://github.com/GoogleCloudPlatform/google-cloud-python", - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - namespace_packages=namespaces, - install_requires=dependencies, - extras_require=extras, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", - include_package_data=True, - zip_safe=False, -) diff --git a/dataproc/synth.metadata b/dataproc/synth.metadata deleted file mode 100644 index 314ca5cec0e4..000000000000 --- a/dataproc/synth.metadata +++ /dev/null @@ -1,361 +0,0 @@ -{ - "updateTime": "2020-01-30T13:21:23.253293Z", - "sources": [ - { - "generator": { - "name": "artman", - "version": "0.44.4", - "dockerImage": "googleapis/artman@sha256:19e945954fc960a4bdfee6cb34695898ab21a8cf0bac063ee39b91f00a1faec8" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "c1246a29e22b0f98e800a536b5b0da2d933a55f2", - "internalRef": "292310790", - "log": "c1246a29e22b0f98e800a536b5b0da2d933a55f2\nUpdating v1 protos with the latest inline documentation (in comments) and config options. 
Also adding a per-service .yaml file.\n\nPiperOrigin-RevId: 292310790\n\nb491d07cadaae7cde5608321f913e5ca1459b32d\nRevert accidental local_repository change\n\nPiperOrigin-RevId: 292245373\n\naf3400a8cb6110025198b59a0f7d018ae3cda700\nUpdate gapic-generator dependency (prebuilt PHP binary support).\n\nPiperOrigin-RevId: 292243997\n\n341fd5690fae36f36cf626ef048fbcf4bbe7cee6\ngrafeas: v1 add resource_definition for the grafeas.io/Project and change references for Project.\n\nPiperOrigin-RevId: 292221998\n\n42e915ec2ece1cd37a590fbcd10aa2c0fb0e5b06\nUpdate the gapic-generator, protoc-java-resource-name-plugin and protoc-docs-plugin to the latest commit.\n\nPiperOrigin-RevId: 292182368\n\nf035f47250675d31492a09f4a7586cfa395520a7\nFix grafeas build and update build.sh script to include gerafeas.\n\nPiperOrigin-RevId: 292168753\n\n26ccb214b7bc4a716032a6266bcb0a9ca55d6dbb\nasset: v1p1beta1 add client config annotations and retry config\n\nPiperOrigin-RevId: 292154210\n\n974ee5c0b5d03e81a50dafcedf41e0efebb5b749\nasset: v1beta1 add client config annotations\n\nPiperOrigin-RevId: 292152573\n\ncf3b61102ed5f36b827bc82ec39be09525f018c8\n Fix to protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 292034635\n\n4e1cfaa7c0fede9e65d64213ca3da1b1255816c0\nUpdate the public proto to support UTF-8 encoded id for CatalogService API, increase the ListCatalogItems deadline to 300s and some minor documentation change\n\nPiperOrigin-RevId: 292030970\n\n9c483584f8fd5a1b862ae07973f4cc7bb3e46648\nasset: add annotations to v1p1beta1\n\nPiperOrigin-RevId: 292009868\n\ne19209fac29731d0baf6d9ac23da1164f7bdca24\nAdd the google.rpc.context.AttributeContext message to the open source\ndirectories.\n\nPiperOrigin-RevId: 291999930\n\nae5662960573f279502bf98a108a35ba1175e782\noslogin API: move file level option on top of the file to avoid protobuf.js bug.\n\nPiperOrigin-RevId: 291990506\n\neba3897fff7c49ed85d3c47fc96fe96e47f6f684\nAdd cc_proto_library and cc_grpc_library targets for Spanner and IAM protos.\n\nPiperOrigin-RevId: 291988651\n\n8e981acfd9b97ea2f312f11bbaa7b6c16e412dea\nBeta launch for PersonDetection and FaceDetection features.\n\nPiperOrigin-RevId: 291821782\n\n994e067fae3b21e195f7da932b08fff806d70b5d\nasset: add annotations to v1p2beta1\n\nPiperOrigin-RevId: 291815259\n\n244e1d2c89346ca2e0701b39e65552330d68545a\nAdd Playable Locations service\n\nPiperOrigin-RevId: 291806349\n\n909f8f67963daf45dd88d020877fb9029b76788d\nasset: add annotations to v1beta2\n\nPiperOrigin-RevId: 291805301\n\n3c39a1d6e23c1ef63c7fba4019c25e76c40dfe19\nKMS: add file-level message for CryptoKeyPath, it is defined in gapic yaml but not\nin proto files.\n\nPiperOrigin-RevId: 291420695\n\nc6f3f350b8387f8d1b85ed4506f30187ebaaddc3\ncontaineranalysis: update v1beta1 and bazel build with annotations\n\nPiperOrigin-RevId: 291401900\n\n92887d74b44e4e636252b7b8477d0d2570cd82db\nfix: fix the location of grpc config file.\n\nPiperOrigin-RevId: 291396015\n\ne26cab8afd19d396b929039dac5d874cf0b5336c\nexpr: add default_host and method_signature annotations to CelService\n\nPiperOrigin-RevId: 291240093\n\n06093ae3952441c34ec176d1f7431b8765cec0be\nirm: fix v1alpha2 bazel build by adding missing proto imports\n\nPiperOrigin-RevId: 291227940\n\na8a2514af326e4673063f9a3c9d0ef1091c87e6c\nAdd proto annotation for cloud/irm API\n\nPiperOrigin-RevId: 291217859\n\n8d16f76de065f530d395a4c7eabbf766d6a120fd\nGenerate Memcache v1beta2 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 
291008516\n\n3af1dabd93df9a9f17bf3624d3b875c11235360b\ngrafeas: Add containeranalysis default_host to Grafeas service\n\nPiperOrigin-RevId: 290965849\n\nbe2663fa95e31cba67d0cd62611a6674db9f74b7\nfix(google/maps/roads): add missing opening bracket\n\nPiperOrigin-RevId: 290964086\n\nfacc26550a0af0696e0534bc9cae9df14275aa7c\nUpdating v2 protos with the latest inline documentation (in comments) and adding a per-service .yaml file.\n\nPiperOrigin-RevId: 290952261\n\ncda99c1f7dc5e4ca9b1caeae1dc330838cbc1461\nChange api_name to 'asset' for v1p1beta1\n\nPiperOrigin-RevId: 290800639\n\n94e9e90c303a820ce40643d9129e7f0d2054e8a1\nAdds Google Maps Road service\n\nPiperOrigin-RevId: 290795667\n\na3b23dcb2eaecce98c600c7d009451bdec52dbda\nrpc: new message ErrorInfo, other comment updates\n\nPiperOrigin-RevId: 290781668\n\n26420ef4e46c37f193c0fbe53d6ebac481de460e\nAdd proto definition for Org Policy v1.\n\nPiperOrigin-RevId: 290771923\n\n7f0dab8177cf371ae019a082e2512de7ac102888\nPublish Routes Preferred API v1 service definitions.\n\nPiperOrigin-RevId: 290326986\n\nad6e508d0728e1d1bca6e3f328cd562718cb772d\nFix: Qualify resource type references with \"jobs.googleapis.com/\"\n\nPiperOrigin-RevId: 290285762\n\n58e770d568a2b78168ddc19a874178fee8265a9d\ncts client library\n\nPiperOrigin-RevId: 290146169\n\naf9daa4c3b4c4a8b7133b81588dd9ffd37270af2\nAdd more programming language options to public proto\n\nPiperOrigin-RevId: 290144091\n\nd9f2bbf2df301ef84641d4cec7c828736a0bd907\ntalent: add missing resource.proto dep to Bazel build target\n\nPiperOrigin-RevId: 290143164\n\n3b3968237451d027b42471cd28884a5a1faed6c7\nAnnotate Talent API.\nAdd gRPC service config for retry.\nUpdate bazel file with google.api.resource dependency.\n\nPiperOrigin-RevId: 290125172\n\n0735b4b096872960568d1f366bfa75b7b0e1f1a3\nWeekly library update.\n\nPiperOrigin-RevId: 289939042\n\n8760d3d9a4543d7f9c0d1c7870aca08b116e4095\nWeekly library update.\n\nPiperOrigin-RevId: 289939020\n\n8607df842f782a901805187e02fff598145b0b0e\nChange Talent API timeout to 30s.\n\nPiperOrigin-RevId: 289912621\n\n908155991fe32570653bcb72ecfdcfc896642f41\nAdd Recommendations AI V1Beta1\n\nPiperOrigin-RevId: 289901914\n\n5c9a8c2bebd8b71aa66d1cc473edfaac837a2c78\nAdding no-arg method signatures for ListBillingAccounts and ListServices\n\nPiperOrigin-RevId: 289891136\n\n50b0e8286ac988b0593bd890eb31fef6ea2f5767\nlongrunning: add grpc service config and default_host annotation to operations.proto\n\nPiperOrigin-RevId: 289876944\n\n6cac27dabe51c54807b0401698c32d34998948a9\n Updating default deadline for Cloud Security Command Center's v1 APIs.\n\nPiperOrigin-RevId: 289875412\n\nd99df0d67057a233c711187e0689baa4f8e6333d\nFix: Correct spelling in C# namespace option\n\nPiperOrigin-RevId: 289709813\n\n2fa8d48165cc48e35b0c62e6f7bdade12229326c\nfeat: Publish Recommender v1 to GitHub.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289619243\n\n9118db63d1ab493a2e44a3b4973fde810a835c49\nfirestore: don't retry reads that fail with Aborted\n\nFor transaction reads that fail with ABORTED, we need to rollback and start a new transaction. Our current configuration makes it so that GAPIC retries ABORTED reads multiple times without making any progress. 
Instead, we should retry at the transaction level.\n\nPiperOrigin-RevId: 289532382\n\n1dbfd3fe4330790b1e99c0bb20beb692f1e20b8a\nFix bazel build\nAdd other langauges (Java was already there) for bigquery/storage/v1alpha2 api.\n\nPiperOrigin-RevId: 289519766\n\nc06599cdd7d11f8d3fd25f8d3249e5bb1a3d5d73\nInitial commit of google.cloud.policytroubleshooter API, The API helps in troubleshooting GCP policies. Refer https://cloud.google.com/iam/docs/troubleshooting-access for more information\n\nPiperOrigin-RevId: 289491444\n\nfce7d80fa16ea241e87f7bc33d68595422e94ecd\nDo not pass samples option for Artman config of recommender v1 API.\n\nPiperOrigin-RevId: 289477403\n\nef179e8c61436297e6bb124352e47e45c8c80cb1\nfix: Address missing Bazel dependency.\n\nBazel builds stopped working in 06ec6d5 because\nthe google/longrunning/operations.proto file took\nan import from google/api/client.proto, but that\nimport was not added to BUILD.bazel.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446074\n\n8841655b242c84fd691d77d7bcf21b61044f01ff\nMigrate Data Labeling v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446026\n\n06ec6d5d053fff299eaa6eaa38afdd36c5e2fc68\nAdd annotations to google.longrunning.v1\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289413169\n\n0480cf40be1d3cc231f4268a2fdb36a8dd60e641\nMigrate IAM Admin v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289411084\n\n1017173e9adeb858587639af61889ad970c728b1\nSpecify a C# namespace for BigQuery Connection v1beta1\n\nPiperOrigin-RevId: 289396763\n\nb08714b378e8e5b0c4ecdde73f92c36d6303b4b6\nfix: Integrate latest proto-docs-plugin fix.\nFixes dialogflow v2\n\nPiperOrigin-RevId: 289189004\n\n51217a67e79255ee1f2e70a6a3919df082513327\nCreate BUILD file for recommender v1\n\nPiperOrigin-RevId: 289183234\n\nacacd87263c0a60e458561b8b8ce9f67c760552a\nGenerate recommender v1 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 289177510\n\n9d2f7133b97720b1fa3601f6dcd30760ba6d8a1e\nFix kokoro build script\n\nPiperOrigin-RevId: 289166315\n\nc43a67530d2a47a0220cad20ca8de39b3fbaf2c5\ncloudtasks: replace missing RPC timeout config for v2beta2 and v2beta3\n\nPiperOrigin-RevId: 289162391\n\n4cefc229a9197236fc0adf02d69b71c0c5cf59de\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 289158456\n\n56f263fe959c50786dab42e3c61402d32d1417bd\nCatalog API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 289149879\n\n4543762b23a57fc3c53d409efc3a9affd47b6ab3\nFix Bazel build\nbilling/v1 and dialogflow/v2 remain broken (not bazel-related issues).\nBilling has wrong configuration, dialogflow failure is caused by a bug in documentation plugin.\n\nPiperOrigin-RevId: 289140194\n\nc9dce519127b97e866ca133a01157f4ce27dcceb\nUpdate Bigtable docs\n\nPiperOrigin-RevId: 289114419\n\n802c5c5f2bf94c3facb011267d04e71942e0d09f\nMigrate DLP to proto annotations (but not GAPIC v2).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289102579\n\n6357f30f2ec3cff1d8239d18b707ff9d438ea5da\nRemove gRPC configuration file that was in the wrong place.\n\nPiperOrigin-RevId: 289096111\n\n360a8792ed62f944109d7e22d613a04a010665b4\n Protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 289011995\n\na79211c20c4f2807eec524d00123bf7c06ad3d6e\nRoll back containeranalysis v1 to GAPIC v1.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288999068\n\n9e60345ba603e03484a8aaa33ce5ffa19c1c652b\nPublish Routes Preferred API v1 proto definitions.\n\nPiperOrigin-RevId: 
288941399\n\nd52885b642ad2aa1f42b132ee62dbf49a73e1e24\nMigrate the service management API to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288909426\n\n6ace586805c08896fef43e28a261337fcf3f022b\ncloudtasks: replace missing RPC timeout config\n\nPiperOrigin-RevId: 288783603\n\n51d906cabee4876b12497054b15b05d4a50ad027\nImport of Grafeas from Github.\n\nUpdate BUILD.bazel accordingly.\n\nPiperOrigin-RevId: 288783426\n\n5ef42bcd363ba0440f0ee65b3c80b499e9067ede\nMigrate Recommender v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288713066\n\n94f986afd365b7d7e132315ddcd43d7af0e652fb\nMigrate Container Analysis v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288708382\n\n7a751a279184970d3b6ba90e4dd4d22a382a0747\nRemove Container Analysis v1alpha1 (nobody publishes it).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288707473\n\n3c0d9c71242e70474b2b640e15bb0a435fd06ff0\nRemove specious annotation from BigQuery Data Transfer before\nanyone accidentally does anything that uses it.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288701604\n\n1af307a4764bd415ef942ac5187fa1def043006f\nMigrate BigQuery Connection to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288698681\n\n" - } - }, - { - "template": { - "name": "python_library", - "origin": "synthtool.gcp", - "version": "2019.10.17" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "dataproc", - "apiVersion": "v1beta2", - "language": "python", - "generator": "gapic", - "config": "google/cloud/dataproc/artman_dataproc_v1beta2.yaml" - } - }, - { - "client": { - "source": "googleapis", - "apiName": "dataproc", - "apiVersion": "v1", - "language": "python", - "generator": "gapic", - "config": "google/cloud/dataproc/artman_dataproc_v1.yaml" - } - } - ], - "newFiles": [ - { - "path": ".coveragerc" - }, - { - "path": ".flake8" - }, - { - "path": ".repo-metadata.json" - }, - { - "path": "CHANGELOG.md" - }, - { - "path": "LICENSE" - }, - { - "path": "MANIFEST.in" - }, - { - "path": "README.rst" - }, - { - "path": "docs/README.rst" - }, - { - "path": "docs/_static/custom.css" - }, - { - "path": "docs/_templates/layout.html" - }, - { - "path": "docs/changelog.md" - }, - { - "path": "docs/conf.py" - }, - { - "path": "docs/gapic/v1/api.rst" - }, - { - "path": "docs/gapic/v1/types.rst" - }, - { - "path": "docs/gapic/v1beta2/api.rst" - }, - { - "path": "docs/gapic/v1beta2/types.rst" - }, - { - "path": "docs/index.rst" - }, - { - "path": "google/__init__.py" - }, - { - "path": "google/cloud/__init__.py" - }, - { - "path": "google/cloud/dataproc.py" - }, - { - "path": "google/cloud/dataproc_v1/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/cluster_controller_client.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/cluster_controller_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/enums.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/job_controller_client.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/job_controller_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/transports/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py" - }, - { - "path": 
"google/cloud/dataproc_v1/gapic/workflow_template_service_client.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/workflow_template_service_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/autoscaling_policies.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/autoscaling_policies_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/clusters.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/clusters_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/jobs.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/jobs_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/operations.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/operations_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/operations_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/shared.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/shared_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/shared_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/workflow_templates.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/workflow_templates_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/workflow_templates_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/types.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/cluster_controller_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/enums.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/job_controller_client.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/job_controller_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/transports/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/clusters.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/clusters_pb2.py" - }, - { - "path": 
"google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/jobs.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/jobs_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/operations.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/operations_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/operations_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/shared.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/shared_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/shared_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/workflow_templates.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/types.py" - }, - { - "path": "noxfile.py" - }, - { - "path": "setup.cfg" - }, - { - "path": "setup.py" - }, - { - "path": "synth.metadata" - }, - { - "path": "synth.py" - }, - { - "path": "tests/system/gapic/v1/test_system_cluster_controller_v1.py" - }, - { - "path": "tests/system/gapic/v1beta2/test_system_cluster_controller_v1beta2.py" - }, - { - "path": "tests/unit/gapic/v1/test_cluster_controller_client_v1.py" - }, - { - "path": "tests/unit/gapic/v1/test_job_controller_client_v1.py" - }, - { - "path": "tests/unit/gapic/v1/test_workflow_template_service_client_v1.py" - }, - { - "path": "tests/unit/gapic/v1beta2/test_autoscaling_policy_service_client_v1beta2.py" - }, - { - "path": "tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py" - }, - { - "path": "tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py" - }, - { - "path": "tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py" - } - ] -} \ No newline at end of file diff --git a/dataproc/synth.py b/dataproc/synth.py deleted file mode 100644 index a2c50b0e4a86..000000000000 --- a/dataproc/synth.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""This script is used to synthesize generated parts of this library.""" - -import re - -import synthtool as s -from synthtool import gcp - -gapic = gcp.GAPICGenerator() -common = gcp.CommonTemplates() -versions = ["v1beta2", "v1"] - -# ---------------------------------------------------------------------------- -# Generate dataproc GAPIC layer -# ---------------------------------------------------------------------------- -for version in versions: - library = gapic.py_library("dataproc", version, include_protos=True,) - s.move(library, excludes=["docs/index.rst", "nox.py", "README.rst", "setup.py"]) - - s.replace( - f"google/cloud/dataproc_{version}/gapic/cluster_controller_client.py", - "metadata_type=operations_pb2.ClusterOperationMetadata,", - "metadata_type=proto_operations_pb2.ClusterOperationMetadata,", - ) - - s.replace( - f"google/cloud/dataproc_{version}/gapic/cluster_controller_client.py", - "\s+Note:.*\n(.*\n)+?.*types.FieldMask.", - f""" - - - .. note:: - - Currently, only the following fields can be updated: - - * ``labels``: Update labels - * ``config.worker_config.num_instances``: Resize primary - worker group - * ``config.secondary_worker_config.num_instances``: Resize - secondary worker group - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dataproc_{version}.types.FieldMask`""", - ) - - s.replace( - f'google/cloud/dataproc_{version}/proto/workflow_templates_pb2.py', - ', and must\n\s+conform to the following PCRE regular expression:' - '(.*\n)+?.*No more than 32', - '. Label values must be between\n' - ' 1 and 63 characters long. No more than 32' - ) - s.replace( - f'google/cloud/dataproc_{version}/proto/workflow_templates_pb2.py', - ', and must conform to\n' - '\s+the following regular expression:(.*\n)+?.* No more than', - '. Label values must be between\n' - ' 1 and 63 characters long. No more than' - ) - -s.replace( - "google/cloud/dataproc_v1beta2/proto/clusters_pb2.py", - "# Generated by the protocol buffer compiler. DO NOT EDIT!", - "# -*- coding: utf-8 -*-\n" - "# Generated by the protocol buffer compiler. DO NOT EDIT!", -) - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=97, cov_level=100) -s.move(templated_files) - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/dataproc/tests/system/gapic/v1/test_system_cluster_controller_v1.py b/dataproc/tests/system/gapic/v1/test_system_cluster_controller_v1.py deleted file mode 100644 index a595af5bcf96..000000000000 --- a/dataproc/tests/system/gapic/v1/test_system_cluster_controller_v1.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import time - -from google.cloud import dataproc_v1 -from google.cloud.dataproc_v1.proto import clusters_pb2 - - -class TestSystemClusterController(object): - def test_list_clusters(self): - project_id = os.environ["PROJECT_ID"] - - client = dataproc_v1.ClusterControllerClient() - project_id_2 = project_id - region = "global" - response = client.list_clusters(project_id_2, region) diff --git a/dataproc/tests/system/gapic/v1beta2/test_system_cluster_controller_v1beta2.py b/dataproc/tests/system/gapic/v1beta2/test_system_cluster_controller_v1beta2.py deleted file mode 100644 index 8db97e8c9b52..000000000000 --- a/dataproc/tests/system/gapic/v1beta2/test_system_cluster_controller_v1beta2.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import time - -from google.cloud import dataproc_v1beta2 -from google.cloud.dataproc_v1beta2.proto import clusters_pb2 - - -class TestSystemClusterController(object): - def test_list_clusters(self): - project_id = os.environ["PROJECT_ID"] - - client = dataproc_v1beta2.ClusterControllerClient() - project_id_2 = project_id - region = "global" - response = client.list_clusters(project_id_2, region) diff --git a/dataproc/tests/unit/gapic/v1/test_cluster_controller_client_v1.py b/dataproc/tests/unit/gapic/v1/test_cluster_controller_client_v1.py deleted file mode 100644 index 1c15fdcf86ad..000000000000 --- a/dataproc/tests/unit/gapic/v1/test_cluster_controller_client_v1.py +++ /dev/null @@ -1,413 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.rpc import status_pb2 - -from google.cloud import dataproc_v1 -from google.cloud.dataproc_v1.proto import clusters_pb2 -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestClusterControllerClient(object): - def test_create_cluster(self): - # Setup Expected Response - project_id_2 = "projectId2939242356" - cluster_name = "clusterName-1018081872" - cluster_uuid = "clusterUuid-1017854240" - expected_response = { - "project_id": project_id_2, - "cluster_name": cluster_name, - "cluster_uuid": cluster_uuid, - } - expected_response = clusters_pb2.Cluster(**expected_response) - operation = operations_pb2.Operation( - name="operations/test_create_cluster", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster = {} - - response = client.create_cluster(project_id, region, cluster) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = clusters_pb2.CreateClusterRequest( - project_id=project_id, region=region, cluster=cluster - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_cluster_exception(self): - # Setup Response - error = status_pb2.Status() - operation = operations_pb2.Operation( - name="operations/test_create_cluster_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster = {} - - response = client.create_cluster(project_id, region, cluster) - exception = response.exception() - assert exception.errors[0] == error - - def test_update_cluster(self): - # Setup Expected Response - project_id_2 = "projectId2939242356" - cluster_name_2 = "clusterName2875867491" - cluster_uuid = "clusterUuid-1017854240" - expected_response = { - "project_id": project_id_2, - "cluster_name": cluster_name_2, - "cluster_uuid": cluster_uuid, - } - 
expected_response = clusters_pb2.Cluster(**expected_response) - operation = operations_pb2.Operation( - name="operations/test_update_cluster", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - cluster = {} - update_mask = {} - - response = client.update_cluster( - project_id, region, cluster_name, cluster, update_mask - ) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = clusters_pb2.UpdateClusterRequest( - project_id=project_id, - region=region, - cluster_name=cluster_name, - cluster=cluster, - update_mask=update_mask, - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_cluster_exception(self): - # Setup Response - error = status_pb2.Status() - operation = operations_pb2.Operation( - name="operations/test_update_cluster_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - cluster = {} - update_mask = {} - - response = client.update_cluster( - project_id, region, cluster_name, cluster, update_mask - ) - exception = response.exception() - assert exception.errors[0] == error - - def test_delete_cluster(self): - # Setup Expected Response - expected_response = {} - expected_response = empty_pb2.Empty(**expected_response) - operation = operations_pb2.Operation( - name="operations/test_delete_cluster", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - - response = client.delete_cluster(project_id, region, cluster_name) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = clusters_pb2.DeleteClusterRequest( - project_id=project_id, region=region, cluster_name=cluster_name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_cluster_exception(self): - # Setup Response - error = status_pb2.Status() - operation = operations_pb2.Operation( - name="operations/test_delete_cluster_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - 
region = "region-934795532" - cluster_name = "clusterName-1018081872" - - response = client.delete_cluster(project_id, region, cluster_name) - exception = response.exception() - assert exception.errors[0] == error - - def test_get_cluster(self): - # Setup Expected Response - project_id_2 = "projectId2939242356" - cluster_name_2 = "clusterName2875867491" - cluster_uuid = "clusterUuid-1017854240" - expected_response = { - "project_id": project_id_2, - "cluster_name": cluster_name_2, - "cluster_uuid": cluster_uuid, - } - expected_response = clusters_pb2.Cluster(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - - response = client.get_cluster(project_id, region, cluster_name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = clusters_pb2.GetClusterRequest( - project_id=project_id, region=region, cluster_name=cluster_name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_cluster_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.ClusterControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - - with pytest.raises(CustomException): - client.get_cluster(project_id, region, cluster_name) - - def test_list_clusters(self): - # Setup Expected Response - next_page_token = "" - clusters_element = {} - clusters = [clusters_element] - expected_response = {"next_page_token": next_page_token, "clusters": clusters} - expected_response = clusters_pb2.ListClustersResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - - paged_list_response = client.list_clusters(project_id, region) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.clusters[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = clusters_pb2.ListClustersRequest( - project_id=project_id, region=region - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_clusters_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.ClusterControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - - paged_list_response = client.list_clusters(project_id, region) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_diagnose_cluster(self): - # Setup Expected Response - expected_response = {} 
- expected_response = empty_pb2.Empty(**expected_response) - operation = operations_pb2.Operation( - name="operations/test_diagnose_cluster", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - - response = client.diagnose_cluster(project_id, region, cluster_name) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = clusters_pb2.DiagnoseClusterRequest( - project_id=project_id, region=region, cluster_name=cluster_name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_diagnose_cluster_exception(self): - # Setup Response - error = status_pb2.Status() - operation = operations_pb2.Operation( - name="operations/test_diagnose_cluster_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - - response = client.diagnose_cluster(project_id, region, cluster_name) - exception = response.exception() - assert exception.errors[0] == error diff --git a/dataproc/tests/unit/gapic/v1/test_job_controller_client_v1.py b/dataproc/tests/unit/gapic/v1/test_job_controller_client_v1.py deleted file mode 100644 index 3508c7e23340..000000000000 --- a/dataproc/tests/unit/gapic/v1/test_job_controller_client_v1.py +++ /dev/null @@ -1,355 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import dataproc_v1 -from google.cloud.dataproc_v1.proto import jobs_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestJobControllerClient(object): - def test_submit_job(self): - # Setup Expected Response - driver_output_resource_uri = "driverOutputResourceUri-542229086" - driver_control_files_uri = "driverControlFilesUri207057643" - job_uuid = "jobUuid-1615012099" - expected_response = { - "driver_output_resource_uri": driver_output_resource_uri, - "driver_control_files_uri": driver_control_files_uri, - "job_uuid": job_uuid, - } - expected_response = jobs_pb2.Job(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.JobControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - job = {} - - response = client.submit_job(project_id, region, job) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = jobs_pb2.SubmitJobRequest( - project_id=project_id, region=region, job=job - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_submit_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.JobControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - job = {} - - with pytest.raises(CustomException): - client.submit_job(project_id, region, job) - - def test_get_job(self): - # Setup Expected Response - driver_output_resource_uri = "driverOutputResourceUri-542229086" - driver_control_files_uri = "driverControlFilesUri207057643" - job_uuid = "jobUuid-1615012099" - expected_response = { - "driver_output_resource_uri": driver_output_resource_uri, - "driver_control_files_uri": driver_control_files_uri, - "job_uuid": job_uuid, - } - expected_response = jobs_pb2.Job(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.JobControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = 
"region-934795532" - job_id = "jobId-1154752291" - - response = client.get_job(project_id, region, job_id) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = jobs_pb2.GetJobRequest( - project_id=project_id, region=region, job_id=job_id - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.JobControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - - with pytest.raises(CustomException): - client.get_job(project_id, region, job_id) - - def test_list_jobs(self): - # Setup Expected Response - next_page_token = "" - jobs_element = {} - jobs = [jobs_element] - expected_response = {"next_page_token": next_page_token, "jobs": jobs} - expected_response = jobs_pb2.ListJobsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.JobControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - - paged_list_response = client.list_jobs(project_id, region) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.jobs[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = jobs_pb2.ListJobsRequest( - project_id=project_id, region=region - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_jobs_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.JobControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - - paged_list_response = client.list_jobs(project_id, region) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_update_job(self): - # Setup Expected Response - driver_output_resource_uri = "driverOutputResourceUri-542229086" - driver_control_files_uri = "driverControlFilesUri207057643" - job_uuid = "jobUuid-1615012099" - expected_response = { - "driver_output_resource_uri": driver_output_resource_uri, - "driver_control_files_uri": driver_control_files_uri, - "job_uuid": job_uuid, - } - expected_response = jobs_pb2.Job(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.JobControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - job = {} - update_mask = {} - - response = client.update_job(project_id, region, job_id, job, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = jobs_pb2.UpdateJobRequest( - project_id=project_id, - region=region, - job_id=job_id, - job=job, - 
update_mask=update_mask, - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.JobControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - job = {} - update_mask = {} - - with pytest.raises(CustomException): - client.update_job(project_id, region, job_id, job, update_mask) - - def test_cancel_job(self): - # Setup Expected Response - driver_output_resource_uri = "driverOutputResourceUri-542229086" - driver_control_files_uri = "driverControlFilesUri207057643" - job_uuid = "jobUuid-1615012099" - expected_response = { - "driver_output_resource_uri": driver_output_resource_uri, - "driver_control_files_uri": driver_control_files_uri, - "job_uuid": job_uuid, - } - expected_response = jobs_pb2.Job(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.JobControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - - response = client.cancel_job(project_id, region, job_id) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = jobs_pb2.CancelJobRequest( - project_id=project_id, region=region, job_id=job_id - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_cancel_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.JobControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - - with pytest.raises(CustomException): - client.cancel_job(project_id, region, job_id) - - def test_delete_job(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.JobControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - - client.delete_job(project_id, region, job_id) - - assert len(channel.requests) == 1 - expected_request = jobs_pb2.DeleteJobRequest( - project_id=project_id, region=region, job_id=job_id - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.JobControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - - with pytest.raises(CustomException): - client.delete_job(project_id, region, job_id) diff --git 
a/dataproc/tests/unit/gapic/v1/test_workflow_template_service_client_v1.py b/dataproc/tests/unit/gapic/v1/test_workflow_template_service_client_v1.py deleted file mode 100644 index 5ed3c69e7e6a..000000000000 --- a/dataproc/tests/unit/gapic/v1/test_workflow_template_service_client_v1.py +++ /dev/null @@ -1,390 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests.""" - -import mock -import pytest - -from google.rpc import status_pb2 - -from google.cloud import dataproc_v1 -from google.cloud.dataproc_v1.proto import workflow_templates_pb2 -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestWorkflowTemplateServiceClient(object): - def test_create_workflow_template(self): - # Setup Expected Response - id_ = "id3355" - name = "name3373707" - version = 351608024 - expected_response = {"id": id_, "name": name, "version": version} - expected_response = workflow_templates_pb2.WorkflowTemplate(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Setup Request - parent = client.region_path("[PROJECT]", "[REGION]") - template = {} - - response = client.create_workflow_template(parent, template) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = workflow_templates_pb2.CreateWorkflowTemplateRequest( - parent=parent, template=template - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_workflow_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Setup request - parent = client.region_path("[PROJECT]", "[REGION]") - template 
= {} - - with pytest.raises(CustomException): - client.create_workflow_template(parent, template) - - def test_get_workflow_template(self): - # Setup Expected Response - id_ = "id3355" - name_2 = "name2-1052831874" - version = 351608024 - expected_response = {"id": id_, "name": name_2, "version": version} - expected_response = workflow_templates_pb2.WorkflowTemplate(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Setup Request - name = client.workflow_template_path( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]" - ) - - response = client.get_workflow_template(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = workflow_templates_pb2.GetWorkflowTemplateRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_workflow_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Setup request - name = client.workflow_template_path( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]" - ) - - with pytest.raises(CustomException): - client.get_workflow_template(name) - - def test_instantiate_workflow_template(self): - # Setup Expected Response - expected_response = {} - expected_response = empty_pb2.Empty(**expected_response) - operation = operations_pb2.Operation( - name="operations/test_instantiate_workflow_template", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Setup Request - name = client.workflow_template_path( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]" - ) - - response = client.instantiate_workflow_template(name) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = workflow_templates_pb2.InstantiateWorkflowTemplateRequest( - name=name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_instantiate_workflow_template_exception(self): - # Setup Response - error = status_pb2.Status() - operation = operations_pb2.Operation( - name="operations/test_instantiate_workflow_template_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Setup Request - name = client.workflow_template_path( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]" - ) - - response = client.instantiate_workflow_template(name) - exception = response.exception() - assert exception.errors[0] == error - - def test_instantiate_inline_workflow_template(self): - # Setup Expected Response - expected_response = {} - expected_response = 
empty_pb2.Empty(**expected_response) - operation = operations_pb2.Operation( - name="operations/test_instantiate_inline_workflow_template", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Setup Request - parent = client.region_path("[PROJECT]", "[REGION]") - template = {} - - response = client.instantiate_inline_workflow_template(parent, template) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = workflow_templates_pb2.InstantiateInlineWorkflowTemplateRequest( - parent=parent, template=template - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_instantiate_inline_workflow_template_exception(self): - # Setup Response - error = status_pb2.Status() - operation = operations_pb2.Operation( - name="operations/test_instantiate_inline_workflow_template_exception", - done=True, - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Setup Request - parent = client.region_path("[PROJECT]", "[REGION]") - template = {} - - response = client.instantiate_inline_workflow_template(parent, template) - exception = response.exception() - assert exception.errors[0] == error - - def test_update_workflow_template(self): - # Setup Expected Response - id_ = "id3355" - name = "name3373707" - version = 351608024 - expected_response = {"id": id_, "name": name, "version": version} - expected_response = workflow_templates_pb2.WorkflowTemplate(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Setup Request - template = {} - - response = client.update_workflow_template(template) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = workflow_templates_pb2.UpdateWorkflowTemplateRequest( - template=template - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_workflow_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Setup request - template = {} - - with pytest.raises(CustomException): - client.update_workflow_template(template) - - def test_list_workflow_templates(self): - # Setup Expected Response - next_page_token = "" - templates_element = {} - templates = [templates_element] - expected_response = {"next_page_token": next_page_token, "templates": templates} - expected_response = workflow_templates_pb2.ListWorkflowTemplatesResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Setup Request - parent = client.region_path("[PROJECT]", "[REGION]") - - paged_list_response = client.list_workflow_templates(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.templates[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = workflow_templates_pb2.ListWorkflowTemplatesRequest( - parent=parent - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_workflow_templates_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Setup request - parent = client.region_path("[PROJECT]", "[REGION]") - - paged_list_response = client.list_workflow_templates(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_workflow_template(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Setup Request - name = client.workflow_template_path( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]" - ) - - client.delete_workflow_template(name) - - assert len(channel.requests) == 1 - expected_request = workflow_templates_pb2.DeleteWorkflowTemplateRequest( - name=name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_workflow_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Setup request - name = client.workflow_template_path( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]" - ) - - with pytest.raises(CustomException): - client.delete_workflow_template(name) diff --git a/dataproc/tests/unit/gapic/v1beta2/test_autoscaling_policy_service_client_v1beta2.py b/dataproc/tests/unit/gapic/v1beta2/test_autoscaling_policy_service_client_v1beta2.py deleted file mode 100644 index 2cc573a3e658..000000000000 --- a/dataproc/tests/unit/gapic/v1beta2/test_autoscaling_policy_service_client_v1beta2.py +++ /dev/null @@ -1,281 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import dataproc_v1beta2 -from google.cloud.dataproc_v1beta2.proto import autoscaling_policies_pb2 -from google.protobuf import empty_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestAutoscalingPolicyServiceClient(object): - def test_create_autoscaling_policy(self): - # Setup Expected Response - id_ = "id3355" - name = "name3373707" - expected_response = {"id": id_, "name": name} - expected_response = autoscaling_policies_pb2.AutoscalingPolicy( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - - # Setup Request - parent = client.region_path("[PROJECT]", "[REGION]") - policy = {} - - response = client.create_autoscaling_policy(parent, policy) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = autoscaling_policies_pb2.CreateAutoscalingPolicyRequest( - parent=parent, policy=policy - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_autoscaling_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - - # Setup request - parent = client.region_path("[PROJECT]", "[REGION]") - policy = {} - - with pytest.raises(CustomException): - client.create_autoscaling_policy(parent, policy) - - def test_update_autoscaling_policy(self): - # Setup Expected Response - id_ = "id3355" - name = "name3373707" - expected_response = {"id": id_, "name": name} - expected_response = autoscaling_policies_pb2.AutoscalingPolicy( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - - # Setup Request - policy = {} - - response = client.update_autoscaling_policy(policy) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = autoscaling_policies_pb2.UpdateAutoscalingPolicyRequest( - policy=policy - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def 
test_update_autoscaling_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - - # Setup request - policy = {} - - with pytest.raises(CustomException): - client.update_autoscaling_policy(policy) - - def test_get_autoscaling_policy(self): - # Setup Expected Response - id_ = "id3355" - name_2 = "name2-1052831874" - expected_response = {"id": id_, "name": name_2} - expected_response = autoscaling_policies_pb2.AutoscalingPolicy( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - - # Setup Request - name = client.autoscaling_policy_path( - "[PROJECT]", "[REGION]", "[AUTOSCALING_POLICY]" - ) - - response = client.get_autoscaling_policy(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = autoscaling_policies_pb2.GetAutoscalingPolicyRequest( - name=name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_autoscaling_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - - # Setup request - name = client.autoscaling_policy_path( - "[PROJECT]", "[REGION]", "[AUTOSCALING_POLICY]" - ) - - with pytest.raises(CustomException): - client.get_autoscaling_policy(name) - - def test_list_autoscaling_policies(self): - # Setup Expected Response - next_page_token = "" - policies_element = {} - policies = [policies_element] - expected_response = {"next_page_token": next_page_token, "policies": policies} - expected_response = autoscaling_policies_pb2.ListAutoscalingPoliciesResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - - # Setup Request - parent = client.region_path("[PROJECT]", "[REGION]") - - paged_list_response = client.list_autoscaling_policies(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.policies[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = autoscaling_policies_pb2.ListAutoscalingPoliciesRequest( - parent=parent - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_autoscaling_policies_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - - # Setup request - parent = client.region_path("[PROJECT]", "[REGION]") - - paged_list_response = client.list_autoscaling_policies(parent) - with pytest.raises(CustomException): - 
list(paged_list_response) - - def test_delete_autoscaling_policy(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - - # Setup Request - name = client.autoscaling_policy_path( - "[PROJECT]", "[REGION]", "[AUTOSCALING_POLICY]" - ) - - client.delete_autoscaling_policy(name) - - assert len(channel.requests) == 1 - expected_request = autoscaling_policies_pb2.DeleteAutoscalingPolicyRequest( - name=name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_autoscaling_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.AutoscalingPolicyServiceClient() - - # Setup request - name = client.autoscaling_policy_path( - "[PROJECT]", "[REGION]", "[AUTOSCALING_POLICY]" - ) - - with pytest.raises(CustomException): - client.delete_autoscaling_policy(name) diff --git a/dataproc/tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py b/dataproc/tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py deleted file mode 100644 index 7c75dc57ea3e..000000000000 --- a/dataproc/tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py +++ /dev/null @@ -1,413 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.rpc import status_pb2 - -from google.cloud import dataproc_v1beta2 -from google.cloud.dataproc_v1beta2.proto import clusters_pb2 -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestClusterControllerClient(object): - def test_create_cluster(self): - # Setup Expected Response - project_id_2 = "projectId2939242356" - cluster_name = "clusterName-1018081872" - cluster_uuid = "clusterUuid-1017854240" - expected_response = { - "project_id": project_id_2, - "cluster_name": cluster_name, - "cluster_uuid": cluster_uuid, - } - expected_response = clusters_pb2.Cluster(**expected_response) - operation = operations_pb2.Operation( - name="operations/test_create_cluster", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster = {} - - response = client.create_cluster(project_id, region, cluster) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = clusters_pb2.CreateClusterRequest( - project_id=project_id, region=region, cluster=cluster - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_cluster_exception(self): - # Setup Response - error = status_pb2.Status() - operation = operations_pb2.Operation( - name="operations/test_create_cluster_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster = {} - - response = client.create_cluster(project_id, region, cluster) - exception = response.exception() - assert exception.errors[0] == error - - def test_update_cluster(self): - # Setup Expected Response - project_id_2 = "projectId2939242356" - cluster_name_2 = "clusterName2875867491" - cluster_uuid = "clusterUuid-1017854240" - expected_response = { - "project_id": project_id_2, - "cluster_name": cluster_name_2, - "cluster_uuid": cluster_uuid, 
- } - expected_response = clusters_pb2.Cluster(**expected_response) - operation = operations_pb2.Operation( - name="operations/test_update_cluster", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - cluster = {} - update_mask = {} - - response = client.update_cluster( - project_id, region, cluster_name, cluster, update_mask - ) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = clusters_pb2.UpdateClusterRequest( - project_id=project_id, - region=region, - cluster_name=cluster_name, - cluster=cluster, - update_mask=update_mask, - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_cluster_exception(self): - # Setup Response - error = status_pb2.Status() - operation = operations_pb2.Operation( - name="operations/test_update_cluster_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - cluster = {} - update_mask = {} - - response = client.update_cluster( - project_id, region, cluster_name, cluster, update_mask - ) - exception = response.exception() - assert exception.errors[0] == error - - def test_delete_cluster(self): - # Setup Expected Response - expected_response = {} - expected_response = empty_pb2.Empty(**expected_response) - operation = operations_pb2.Operation( - name="operations/test_delete_cluster", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - - response = client.delete_cluster(project_id, region, cluster_name) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = clusters_pb2.DeleteClusterRequest( - project_id=project_id, region=region, cluster_name=cluster_name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_cluster_exception(self): - # Setup Response - error = status_pb2.Status() - operation = operations_pb2.Operation( - name="operations/test_delete_cluster_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.ClusterControllerClient() - - # Setup Request - project_id = 
"projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - - response = client.delete_cluster(project_id, region, cluster_name) - exception = response.exception() - assert exception.errors[0] == error - - def test_get_cluster(self): - # Setup Expected Response - project_id_2 = "projectId2939242356" - cluster_name_2 = "clusterName2875867491" - cluster_uuid = "clusterUuid-1017854240" - expected_response = { - "project_id": project_id_2, - "cluster_name": cluster_name_2, - "cluster_uuid": cluster_uuid, - } - expected_response = clusters_pb2.Cluster(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - - response = client.get_cluster(project_id, region, cluster_name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = clusters_pb2.GetClusterRequest( - project_id=project_id, region=region, cluster_name=cluster_name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_cluster_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.ClusterControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - - with pytest.raises(CustomException): - client.get_cluster(project_id, region, cluster_name) - - def test_list_clusters(self): - # Setup Expected Response - next_page_token = "" - clusters_element = {} - clusters = [clusters_element] - expected_response = {"next_page_token": next_page_token, "clusters": clusters} - expected_response = clusters_pb2.ListClustersResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - - paged_list_response = client.list_clusters(project_id, region) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.clusters[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = clusters_pb2.ListClustersRequest( - project_id=project_id, region=region - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_clusters_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.ClusterControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - - paged_list_response = client.list_clusters(project_id, region) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_diagnose_cluster(self): - # 
Setup Expected Response - expected_response = {} - expected_response = empty_pb2.Empty(**expected_response) - operation = operations_pb2.Operation( - name="operations/test_diagnose_cluster", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - - response = client.diagnose_cluster(project_id, region, cluster_name) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = clusters_pb2.DiagnoseClusterRequest( - project_id=project_id, region=region, cluster_name=cluster_name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_diagnose_cluster_exception(self): - # Setup Response - error = status_pb2.Status() - operation = operations_pb2.Operation( - name="operations/test_diagnose_cluster_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.ClusterControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - cluster_name = "clusterName-1018081872" - - response = client.diagnose_cluster(project_id, region, cluster_name) - exception = response.exception() - assert exception.errors[0] == error diff --git a/dataproc/tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py b/dataproc/tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py deleted file mode 100644 index 00802240223e..000000000000 --- a/dataproc/tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py +++ /dev/null @@ -1,363 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import dataproc_v1beta2 -from google.cloud.dataproc_v1beta2.proto import jobs_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestJobControllerClient(object): - def test_submit_job(self): - # Setup Expected Response - submitted_by = "submittedBy-2047729125" - driver_output_resource_uri = "driverOutputResourceUri-542229086" - driver_control_files_uri = "driverControlFilesUri207057643" - job_uuid = "jobUuid-1615012099" - expected_response = { - "submitted_by": submitted_by, - "driver_output_resource_uri": driver_output_resource_uri, - "driver_control_files_uri": driver_control_files_uri, - "job_uuid": job_uuid, - } - expected_response = jobs_pb2.Job(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.JobControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - job = {} - - response = client.submit_job(project_id, region, job) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = jobs_pb2.SubmitJobRequest( - project_id=project_id, region=region, job=job - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_submit_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.JobControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - job = {} - - with pytest.raises(CustomException): - client.submit_job(project_id, region, job) - - def test_get_job(self): - # Setup Expected Response - submitted_by = "submittedBy-2047729125" - driver_output_resource_uri = "driverOutputResourceUri-542229086" - driver_control_files_uri = "driverControlFilesUri207057643" - job_uuid = "jobUuid-1615012099" - expected_response = { - "submitted_by": submitted_by, - "driver_output_resource_uri": driver_output_resource_uri, - "driver_control_files_uri": driver_control_files_uri, - "job_uuid": job_uuid, - } - expected_response = jobs_pb2.Job(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as 
create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.JobControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - - response = client.get_job(project_id, region, job_id) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = jobs_pb2.GetJobRequest( - project_id=project_id, region=region, job_id=job_id - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.JobControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - - with pytest.raises(CustomException): - client.get_job(project_id, region, job_id) - - def test_list_jobs(self): - # Setup Expected Response - next_page_token = "" - jobs_element = {} - jobs = [jobs_element] - expected_response = {"next_page_token": next_page_token, "jobs": jobs} - expected_response = jobs_pb2.ListJobsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.JobControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - - paged_list_response = client.list_jobs(project_id, region) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.jobs[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = jobs_pb2.ListJobsRequest( - project_id=project_id, region=region - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_jobs_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.JobControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - - paged_list_response = client.list_jobs(project_id, region) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_update_job(self): - # Setup Expected Response - submitted_by = "submittedBy-2047729125" - driver_output_resource_uri = "driverOutputResourceUri-542229086" - driver_control_files_uri = "driverControlFilesUri207057643" - job_uuid = "jobUuid-1615012099" - expected_response = { - "submitted_by": submitted_by, - "driver_output_resource_uri": driver_output_resource_uri, - "driver_control_files_uri": driver_control_files_uri, - "job_uuid": job_uuid, - } - expected_response = jobs_pb2.Job(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.JobControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - job = {} - update_mask = {} - - response = 
client.update_job(project_id, region, job_id, job, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = jobs_pb2.UpdateJobRequest( - project_id=project_id, - region=region, - job_id=job_id, - job=job, - update_mask=update_mask, - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.JobControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - job = {} - update_mask = {} - - with pytest.raises(CustomException): - client.update_job(project_id, region, job_id, job, update_mask) - - def test_cancel_job(self): - # Setup Expected Response - submitted_by = "submittedBy-2047729125" - driver_output_resource_uri = "driverOutputResourceUri-542229086" - driver_control_files_uri = "driverControlFilesUri207057643" - job_uuid = "jobUuid-1615012099" - expected_response = { - "submitted_by": submitted_by, - "driver_output_resource_uri": driver_output_resource_uri, - "driver_control_files_uri": driver_control_files_uri, - "job_uuid": job_uuid, - } - expected_response = jobs_pb2.Job(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.JobControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - - response = client.cancel_job(project_id, region, job_id) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = jobs_pb2.CancelJobRequest( - project_id=project_id, region=region, job_id=job_id - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_cancel_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.JobControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - - with pytest.raises(CustomException): - client.cancel_job(project_id, region, job_id) - - def test_delete_job(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.JobControllerClient() - - # Setup Request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - - client.delete_job(project_id, region, job_id) - - assert len(channel.requests) == 1 - expected_request = jobs_pb2.DeleteJobRequest( - project_id=project_id, region=region, job_id=job_id - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as 
create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.JobControllerClient() - - # Setup request - project_id = "projectId-1969970175" - region = "region-934795532" - job_id = "jobId-1154752291" - - with pytest.raises(CustomException): - client.delete_job(project_id, region, job_id) diff --git a/dataproc/tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py b/dataproc/tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py deleted file mode 100644 index bb7e0b4fcea1..000000000000 --- a/dataproc/tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py +++ /dev/null @@ -1,390 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests.""" - -import mock -import pytest - -from google.rpc import status_pb2 - -from google.cloud import dataproc_v1beta2 -from google.cloud.dataproc_v1beta2.proto import workflow_templates_pb2 -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestWorkflowTemplateServiceClient(object): - def test_create_workflow_template(self): - # Setup Expected Response - id_ = "id3355" - name = "name3373707" - version = 351608024 - expected_response = {"id": id_, "name": name, "version": version} - expected_response = workflow_templates_pb2.WorkflowTemplate(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.WorkflowTemplateServiceClient() - - # Setup Request - parent = client.region_path("[PROJECT]", "[REGION]") - template = {} - - response = client.create_workflow_template(parent, template) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = workflow_templates_pb2.CreateWorkflowTemplateRequest( - parent=parent, template=template - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def 
test_create_workflow_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.WorkflowTemplateServiceClient() - - # Setup request - parent = client.region_path("[PROJECT]", "[REGION]") - template = {} - - with pytest.raises(CustomException): - client.create_workflow_template(parent, template) - - def test_get_workflow_template(self): - # Setup Expected Response - id_ = "id3355" - name_2 = "name2-1052831874" - version = 351608024 - expected_response = {"id": id_, "name": name_2, "version": version} - expected_response = workflow_templates_pb2.WorkflowTemplate(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.WorkflowTemplateServiceClient() - - # Setup Request - name = client.workflow_template_path( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]" - ) - - response = client.get_workflow_template(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = workflow_templates_pb2.GetWorkflowTemplateRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_workflow_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.WorkflowTemplateServiceClient() - - # Setup request - name = client.workflow_template_path( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]" - ) - - with pytest.raises(CustomException): - client.get_workflow_template(name) - - def test_instantiate_workflow_template(self): - # Setup Expected Response - expected_response = {} - expected_response = empty_pb2.Empty(**expected_response) - operation = operations_pb2.Operation( - name="operations/test_instantiate_workflow_template", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.WorkflowTemplateServiceClient() - - # Setup Request - name = client.workflow_template_path( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]" - ) - - response = client.instantiate_workflow_template(name) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = workflow_templates_pb2.InstantiateWorkflowTemplateRequest( - name=name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_instantiate_workflow_template_exception(self): - # Setup Response - error = status_pb2.Status() - operation = operations_pb2.Operation( - name="operations/test_instantiate_workflow_template_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client 
= dataproc_v1beta2.WorkflowTemplateServiceClient() - - # Setup Request - name = client.workflow_template_path( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]" - ) - - response = client.instantiate_workflow_template(name) - exception = response.exception() - assert exception.errors[0] == error - - def test_instantiate_inline_workflow_template(self): - # Setup Expected Response - expected_response = {} - expected_response = empty_pb2.Empty(**expected_response) - operation = operations_pb2.Operation( - name="operations/test_instantiate_inline_workflow_template", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.WorkflowTemplateServiceClient() - - # Setup Request - parent = client.region_path("[PROJECT]", "[REGION]") - template = {} - - response = client.instantiate_inline_workflow_template(parent, template) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = workflow_templates_pb2.InstantiateInlineWorkflowTemplateRequest( - parent=parent, template=template - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_instantiate_inline_workflow_template_exception(self): - # Setup Response - error = status_pb2.Status() - operation = operations_pb2.Operation( - name="operations/test_instantiate_inline_workflow_template_exception", - done=True, - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.WorkflowTemplateServiceClient() - - # Setup Request - parent = client.region_path("[PROJECT]", "[REGION]") - template = {} - - response = client.instantiate_inline_workflow_template(parent, template) - exception = response.exception() - assert exception.errors[0] == error - - def test_update_workflow_template(self): - # Setup Expected Response - id_ = "id3355" - name = "name3373707" - version = 351608024 - expected_response = {"id": id_, "name": name, "version": version} - expected_response = workflow_templates_pb2.WorkflowTemplate(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.WorkflowTemplateServiceClient() - - # Setup Request - template = {} - - response = client.update_workflow_template(template) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = workflow_templates_pb2.UpdateWorkflowTemplateRequest( - template=template - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_workflow_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.WorkflowTemplateServiceClient() - - # Setup request - template = {} - - with pytest.raises(CustomException): - 
client.update_workflow_template(template) - - def test_list_workflow_templates(self): - # Setup Expected Response - next_page_token = "" - templates_element = {} - templates = [templates_element] - expected_response = {"next_page_token": next_page_token, "templates": templates} - expected_response = workflow_templates_pb2.ListWorkflowTemplatesResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.WorkflowTemplateServiceClient() - - # Setup Request - parent = client.region_path("[PROJECT]", "[REGION]") - - paged_list_response = client.list_workflow_templates(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.templates[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = workflow_templates_pb2.ListWorkflowTemplatesRequest( - parent=parent - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_workflow_templates_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.WorkflowTemplateServiceClient() - - # Setup request - parent = client.region_path("[PROJECT]", "[REGION]") - - paged_list_response = client.list_workflow_templates(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_workflow_template(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.WorkflowTemplateServiceClient() - - # Setup Request - name = client.workflow_template_path( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]" - ) - - client.delete_workflow_template(name) - - assert len(channel.requests) == 1 - expected_request = workflow_templates_pb2.DeleteWorkflowTemplateRequest( - name=name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_workflow_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dataproc_v1beta2.WorkflowTemplateServiceClient() - - # Setup request - name = client.workflow_template_path( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]" - ) - - with pytest.raises(CustomException): - client.delete_workflow_template(name) diff --git a/logging/.coveragerc b/logging/.coveragerc deleted file mode 100644 index b178b094aa1d..000000000000 --- a/logging/.coveragerc +++ /dev/null @@ -1,19 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! 
-[run] -branch = True - -[report] -fail_under = 100 -show_missing = True -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file diff --git a/logging/.flake8 b/logging/.flake8 deleted file mode 100644 index 0268ecc9c55c..000000000000 --- a/logging/.flake8 +++ /dev/null @@ -1,14 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - *_pb2.py - - # Standard linting exemptions. - __pycache__, - .git, - *.pyc, - conf.py diff --git a/logging/.repo-metadata.json b/logging/.repo-metadata.json deleted file mode 100644 index 33d1b1f819ee..000000000000 --- a/logging/.repo-metadata.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "name": "logging", - "name_pretty": "Stackdriver Logging", - "product_documentation": "https://cloud.google.com/logging/docs", - "client_documentation": "https://googleapis.dev/python/logging/latest", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559764", - "release_level": "ga", - "language": "python", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-logging", - "api_id": "logging.googleapis.com" -} \ No newline at end of file diff --git a/logging/CHANGELOG.md b/logging/CHANGELOG.md deleted file mode 100644 index 05caf8d580b8..000000000000 --- a/logging/CHANGELOG.md +++ /dev/null @@ -1,275 +0,0 @@ -# Changelog - -[PyPI History][1] - -[1]: https://pypi.org/project/google-cloud-logging/#history - -## 1.14.0 - -10-15-2019 06:50 PDT - - -### Implementation Changes -- Fix proto copy. ([#9420](https://github.com/googleapis/google-cloud-python/pull/9420)) - -### Dependencies -- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) - -## 1.13.0 - -09-23-2019 10:00 PDT - -### Implementation Changes -- Pass 'stream' argument to super in 'ContainerEngineHandler.__init__'. ([#9166](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9166)) - -### New Features -- Add LoggingV2Servicer, LogSinks, logging_metrics, and log_entry. Add LogSeverity and HttpRequest types (via synth). ([#9262](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9262)) -- Add client_options to logging v1 ([#9046](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9046)) - -### Documentation -- Remove compatability badges from READMEs. ([#9035](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9035)) - -### Internal / Testing Changes -- Docs: Remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9085)) -- Delete custom synth removing gRPC send/recv msg size limits. ([#8939](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8939)) - -## 1.12.1 - -08-01-2019 09:45 PDT - - -### Implementation Changes -- Remove gRPC size restrictions (4MB default) ([#8860](https://github.com/googleapis/google-cloud-python/pull/8860)) -- Map stdlib loglevels to Stackdriver severity enum values. ([#8837](https://github.com/googleapis/google-cloud-python/pull/8837)) - -### Documentation -- Fix 'list_entries' example with projects. 
([#8858](https://github.com/googleapis/google-cloud-python/pull/8858)) - -### Internal / Testing Changes -- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) - -## 1.12.0 - -07-24-2019 16:47 PDT - - -### Implementation Changes -- Set the 'timestamp' on log records created by handler. ([#8227](https://github.com/googleapis/google-cloud-python/pull/8227)) -- Clarify worker thread implementation. ([#8228](https://github.com/googleapis/google-cloud-python/pull/8228)) - -### New Features -- Add path-construction helpers to GAPIC clients (via synth). ([#8631](https://github.com/googleapis/google-cloud-python/pull/8631)) -- Add 'client_options' support, update list method docstrings (via synth). ([#8535](https://github.com/googleapis/google-cloud-python/pull/8535)) - -### Dependencies -- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) - -### Documentation -- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) -- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) - -### Internal / Testing Changes -- Re-add "generated" markers (via synth). ([#8538](https://github.com/googleapis/google-cloud-python/pull/8538)) -- Add nox session 'docs' to remaining manual clients. ([#8478](https://github.com/googleapis/google-cloud-python/pull/8478)) -- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) -- Fix tests broken in PR [#8227](https://github.com/googleapis/google-cloud-python/pull/8227). ([#8273](https://github.com/googleapis/google-cloud-python/pull/8273)) -- Add empty lines. ([#8064](https://github.com/googleapis/google-cloud-python/pull/8064)) -- Use alabaster theme everwhere. ([#8021](https://github.com/googleapis/google-cloud-python/pull/8021)) - -## 1.11.0 - -05-16-2019 12:27 PDT - - -### Implementation Changes -- Add routing header to method metadata (via synth). ([#7598](https://github.com/googleapis/google-cloud-python/pull/7598)) -- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) -- Use FQDN for GCE metadata endpoint. ([#7520](https://github.com/googleapis/google-cloud-python/pull/7520)) - -### New Features -- Add `client_info` support to client. ([#7874](https://github.com/googleapis/google-cloud-python/pull/7874)) and ([#7901](https://github.com/googleapis/google-cloud-python/pull/7901)) - -### Dependencies -- Pin `google-cloud-core >= 1.0.0, < 2.0dev`. ([#7993](https://github.com/googleapis/google-cloud-python/pull/7993)) - -### Documentation -- Update client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) -- Reformat snippet (via synth). ([#7216](https://github.com/googleapis/google-cloud-python/pull/7216)) -- Add snippet for logging a resource. ([#7212](https://github.com/googleapis/google-cloud-python/pull/7212)) - -### Internal / Testing Changes -- Reorder methods in file (via synth). ([#7810](https://github.com/googleapis/google-cloud-python/pull/7810)) -- Copy lintified proto files (via synth). ([#7450](https://github.com/googleapis/google-cloud-python/pull/7450)) -- Trivial gapic-generator change. 
([#7230](https://github.com/googleapis/google-cloud-python/pull/7230)) -- Add protos as an artifact to library ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) - -## 1.10.0 - -01-17-2019 15:37 PST - - -### Implementation Changes -- Change WriteLogEntries retry policy. -- Protoc-generated serialization update. ([#7088](https://github.com/googleapis/google-cloud-python/pull/7088)) -- GAPIC generation fixes. ([#7061](https://github.com/googleapis/google-cloud-python/pull/7061)) - -### Internal / Testing Changes -- Update copyright headers. -- Use 'python-3.6' for 'blacken' run. ([#7064](https://github.com/googleapis/google-cloud-python/pull/7064)) - -## 1.9.1 - -12-17-2018 16:49 PST - - -### Implementation Changes -- Allow setting name, args on default handler (post-blacken) ([#6828](https://github.com/googleapis/google-cloud-python/pull/6828)) - -### Documentation -- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) -- Normalize docs for `page_size` / `max_results` / `page_token`. ([#6842](https://github.com/googleapis/google-cloud-python/pull/6842)) - -## 1.9.0 - -12-10-2018 12:55 PST - - -### Implementation Changes -- Import `iam.policy` from `google.api_core`. ([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) -- Pick up fixes to GAPIC generator. ([#6631](https://github.com/googleapis/google-cloud-python/pull/6631)) -- Fix `client_info` bug, update docstrings via synth. ([#6435](https://github.com/googleapis/google-cloud-python/pull/6435)) -- Revert "Allow turning on JSON Detection in StackDriver" ([#6352](https://github.com/googleapis/google-cloud-python/pull/6352)) -- Allow turning on JSON Detection in StackDriver ([#6293](https://github.com/googleapis/google-cloud-python/pull/6293)) - -### New Features -- Add support for additional 'LogEntry' fields ([#6229](https://github.com/googleapis/google-cloud-python/pull/6229)) - -### Dependencies -- Update dependency to google-cloud-core ([#6835](https://github.com/googleapis/google-cloud-python/pull/6835)) -- Bump minimum `api_core` version for all GAPIC libs to 1.4.1. ([#6391](https://github.com/googleapis/google-cloud-python/pull/6391)) - - -### Internal / Testing Changes -- Change the url to the canonical one ([#6843](https://github.com/googleapis/google-cloud-python/pull/6843)) -- Omit local deps ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) -- Run black at end of synth.py ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) -- Blackening Continued... ([#6667](https://github.com/googleapis/google-cloud-python/pull/6667)) -- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) -- Logging: add 'synth.py'. ([#6081](https://github.com/googleapis/google-cloud-python/pull/6081)) - -## 1.8.0 - -10-17-2018 14:23 PDT - -### Implementation Changes - -- Logging: allow more tries on inner retry for '_list_entries'. ([#6179](https://github.com/googleapis/google-cloud-python/pull/6179)) -- Accommodate payload-less log entries. 
([#6103](https://github.com/googleapis/google-cloud-python/pull/6103)) - -### New Features - -- Logging: support request-correlated logging in App Engine standard python37 runtime ([#6118](https://github.com/googleapis/google-cloud-python/pull/6118)) - -### Documentation - -- Logging: fix class reference in docstring ([#6153](https://github.com/googleapis/google-cloud-python/pull/6153)) -- Translate / Logging / Language: restore detailed usage docs. ([#5999](https://github.com/googleapis/google-cloud-python/pull/5999)) -- Redirect renamed 'usage.html'/'client.html' -> 'index.html'. ([#5996](https://github.com/googleapis/google-cloud-python/pull/5996)) - -### Internal / Testing Changes - -- Use new Nox ([#6175](https://github.com/googleapis/google-cloud-python/pull/6175)) -- Logging: harden systest teardown against 'DeadlineExceeded' retry errors. ([#6182](https://github.com/googleapis/google-cloud-python/pull/6182)) -- Logging: fix lint errors. ([#6183](https://github.com/googleapis/google-cloud-python/pull/6183)) -- Harden sink / metric creation against transient errors. ([#6180](https://github.com/googleapis/google-cloud-python/pull/6180)) -- Logging: test both GCLOUD_PROJECT and GOOGLE_CLOUD_PROJECT env vars ([#6138](https://github.com/googleapis/google-cloud-python/pull/6138)) -- Harden 'test_list_entry_with_unregistered' against 429 errors. ([#6181](https://github.com/googleapis/google-cloud-python/pull/6181)) -- Prep logging docs for repo split. ([#5943](https://github.com/googleapis/google-cloud-python/pull/5943)) - -## 1.7.0 - -### Implementation Changes -- Print to stderr instead of stdout when exiting the program ([#5569](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5569)) -- Avoid overwriting '__module__' of messages from shared modules. ([#5364](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5364)) -- Support older Django versions in request middleware [#5024](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5024) -- Fix bad trove classifier [#5386](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5386) - -### New Features -- Add support for `trace` and `span_id` to logging async API ([#5908](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5908)) -- Add support for `span_id` attribute of log entries ([#5885](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5885)) -- Add support for `trace` attribute of log entries ([#5878](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5878)) -- Add support for Python 3.7 and remove 3.4 ([#5295](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5295)) - -### Documentation -- Replace links to '/stable/' with '/latest/'. ([#5901](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5901)) - -### Internal / Testing Changes -- Nox: use inplace installs ([#5865](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5865)) -- Unflake logging systests ([#5698](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5698)) -- Harden `_list_entries` system test further against backoff failure. ([#5551](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5551)) -- Harden logging systests ([#5496](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5496)) -- Harden system tests against 'ResourceExhausted' quota errors. 
([#5486](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5486)) -- Modify system tests to use prerelease versions of grpcio ([#5304](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5304)) -- Plug leaky sink in systests. ([#5247](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5247)) - -## 1.6.0 - -### Dependencies - -- The minimum version for `google-api-core` has been updated to version 1.0.0. This may cause some incompatibility with older google-cloud libraries, you will need to update those libraries if you have a dependency conflict. (#4944, #4946) - -### Testing and internal changes - -- Install local dependencies when running lint (#4936) -- Re-enable lint for tests, remove usage of pylint (#4921) -- Normalize all setup.py files (#4909) - -## 1.5.0 - -### New features - -- Added `max_latency` to `BackgroundThreadTransport`. (#4762) -- Added support for unique writer identity in `Sink`. (#4595, #4708, #4704, #4706) - -### Implementation changes - -- The underlying auto-generated client library was re-generated to pick up new features and bugfixes. (#4759) -- Moved the code path of `get_gae_labels()` to `emit()`. (#4824) -- Removed a debug print statement. (#4838) -- `LogSink.create` captures the server-generated `writerIdentity`. (#4707) -- Accomodated a back-end change making `Sink.filter` optional. (#4699) - -### Testing - -- Fixed system tests (#4768) -- Hardened test for `retrieve_metadata_server` against transparent DNS proxies. (#4698) -- Added cleanup for Pub / Sub topic in logging system test. (#4532) -- Added another check for Python 2.7 in Logging `nox -s default`. (#4523) -- Pinned `django` test dependency to `< 2.0` in Python 2.7. (#4519) -- Maked a `nox -s default` session for all packages. (#4324) -- Shortened test names. (#4321) - -### Documentation - -- Added doc to highlight missing `uniqueWriterIdentity` field. (#4579) -- Fixing "Fore" -> "For" typo in README docs. (#4317) - -## 1.4.0 - -### Implementation Changes - -- Remove `deepcopy` of `Client._http` in background transport (#3954) - -### Documentation - -- Added link to "Python Development Environment Setup Guide" in - project README (#4187, h/t to @michaelawyu) - -### Dependencies - -- Upgrading to `google-cloud-core >= 0.28.0` and adding dependency - on `google-api-core` (#4221, #4280) -- Deferring to `google-api-core` for `grpcio` and - `googleapis-common-protos`dependencies (#4096, #4098) - -PyPI: https://pypi.org/project/google-cloud-logging/1.4.0/ diff --git a/logging/LICENSE b/logging/LICENSE deleted file mode 100644 index d64569567334..000000000000 --- a/logging/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/logging/MANIFEST.in b/logging/MANIFEST.in deleted file mode 100644 index 9cbf175afe6b..000000000000 --- a/logging/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include README.rst LICENSE -recursive-include google *.json *.proto -recursive-include tests * -global-exclude *.py[co] -global-exclude __pycache__ diff --git a/logging/README.rst b/logging/README.rst deleted file mode 100644 index 220a6cf17be2..000000000000 --- a/logging/README.rst +++ /dev/null @@ -1,113 +0,0 @@ -Python Client for Stackdriver Logging -===================================== - -|pypi| |versions| - -`Stackdriver Logging API`_: Writes log entries and manages your Stackdriver -Logging configuration. - -- `Client Library Documentation`_ -- `Product Documentation`_ - -.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-logging.svg - :target: https://pypi.org/project/google-cloud-logging/ -.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-logging.svg - :target: https://pypi.org/project/google-cloud-logging/ -.. _Stackdriver Logging API: https://cloud.google.com/logging -.. _Client Library Documentation: https://googleapis.dev/python/logging/latest -.. _Product Documentation: https://cloud.google.com/logging/docs - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. `Enable the Stackdriver Logging API.`_ -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the Stackdriver Logging API.: https://cloud.google.com/logging -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Supported Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.5 - -Deprecated Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv - source /bin/activate - /bin/pip install google-cloud-logging - - -Windows -^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-logging - -Using the API -------------- - -.. code:: python - - from google.cloud import logging_v2 - - client = logging_v2.LoggingServiceV2Client() - entries = [] - response = client.write_log_entries(entries) - -.. code:: python - - from google.cloud import logging - client = logging.Client() - logger = client.logger('log_name') - logger.log_text('A simple entry') # API call - -Example of fetching entries: - -.. 
code:: python - - from google.cloud import logging - client = logging.Client() - logger = client.logger('log_name') - for entry in logger.list_entries(): - print(entry.payload) - -Next Steps -~~~~~~~~~~ - -- Read the `Client Library Documentation`_ to see other available - methods on the client. -- Read the `Product Documentation`_ to learn more about the product and see - How-to Guides. diff --git a/logging/docs/README.rst b/logging/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/logging/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/logging/docs/_static/custom.css b/logging/docs/_static/custom.css deleted file mode 100644 index 0abaf229fce3..000000000000 --- a/logging/docs/_static/custom.css +++ /dev/null @@ -1,4 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} \ No newline at end of file diff --git a/logging/docs/_templates/layout.html b/logging/docs/_templates/layout.html deleted file mode 100644 index 228529efe2d2..000000000000 --- a/logging/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@ - -{% extends "!layout.html" %} -{%- block content %} -{%- if theme_fixed_sidebar|lower == 'true' %} -
- {{ sidebar() }} - {%- block document %} -
- {%- if render_sidebar %} -
- {%- endif %} - - {%- block relbar_top %} - {%- if theme_show_relbar_top|tobool %} - - {%- endif %} - {% endblock %} - -
-
- On January 1, 2020 this library will no longer support Python 2 on the latest released version. - Previously released library versions will continue to be available. For more information please - visit Python 2 support on Google Cloud. -
- {% block body %} {% endblock %} -
- - {%- block relbar_bottom %} - {%- if theme_show_relbar_bottom|tobool %} - - {%- endif %} - {% endblock %} - - {%- if render_sidebar %} -
- {%- endif %} -
- {%- endblock %} -
-
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/logging/docs/changelog.md b/logging/docs/changelog.md deleted file mode 120000 index 04c99a55caae..000000000000 --- a/logging/docs/changelog.md +++ /dev/null @@ -1 +0,0 @@ -../CHANGELOG.md \ No newline at end of file diff --git a/logging/docs/client.rst b/logging/docs/client.rst deleted file mode 100644 index f04d5c5255f1..000000000000 --- a/logging/docs/client.rst +++ /dev/null @@ -1,6 +0,0 @@ -Stackdriver Logging Client -========================== - -.. automodule:: google.cloud.logging.client - :members: - :show-inheritance: diff --git a/logging/docs/conf.py b/logging/docs/conf.py deleted file mode 100644 index c0253fdd03a8..000000000000 --- a/logging/docs/conf.py +++ /dev/null @@ -1,359 +0,0 @@ -# -*- coding: utf-8 -*- -# -# google-cloud-logging documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-logging" -copyright = u"2017, Google" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. 
-language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. 
-# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-logging-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - #'preamble': '', - # Latex figure (float) alignment - #'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-logging.tex", - u"google-cloud-logging Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. 
List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-logging", - u"google-cloud-logging Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-logging", - u"google-cloud-logging Documentation", - author, - "google-cloud-logging", - "GAPIC library for the logging API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/logging/docs/entries.rst b/logging/docs/entries.rst deleted file mode 100644 index 223eadc0756e..000000000000 --- a/logging/docs/entries.rst +++ /dev/null @@ -1,7 +0,0 @@ -Entries -======= - -.. automodule:: google.cloud.logging.entries - :members: - :show-inheritance: - :member-order: groupwise diff --git a/logging/docs/gapic/v2/api.rst b/logging/docs/gapic/v2/api.rst deleted file mode 100644 index 2dc6bf6fcc6b..000000000000 --- a/logging/docs/gapic/v2/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Stackdriver Logging API -================================== - -.. automodule:: google.cloud.logging_v2 - :members: - :inherited-members: \ No newline at end of file diff --git a/logging/docs/gapic/v2/types.rst b/logging/docs/gapic/v2/types.rst deleted file mode 100644 index 5521d4f9bc12..000000000000 --- a/logging/docs/gapic/v2/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Stackdriver Logging API Client -======================================== - -.. automodule:: google.cloud.logging_v2.types - :members: \ No newline at end of file diff --git a/logging/docs/handlers-app-engine.rst b/logging/docs/handlers-app-engine.rst deleted file mode 100644 index 71c45e3690be..000000000000 --- a/logging/docs/handlers-app-engine.rst +++ /dev/null @@ -1,6 +0,0 @@ -Google App Engine flexible Log Handler -====================================== - -.. 
automodule:: google.cloud.logging.handlers.app_engine - :members: - :show-inheritance: diff --git a/logging/docs/handlers-container-engine.rst b/logging/docs/handlers-container-engine.rst deleted file mode 100644 index a0c6b2bc9228..000000000000 --- a/logging/docs/handlers-container-engine.rst +++ /dev/null @@ -1,6 +0,0 @@ -Google Container Engine Log Handler -=================================== - -.. automodule:: google.cloud.logging.handlers.container_engine - :members: - :show-inheritance: diff --git a/logging/docs/handlers.rst b/logging/docs/handlers.rst deleted file mode 100644 index 1a258a88a541..000000000000 --- a/logging/docs/handlers.rst +++ /dev/null @@ -1,6 +0,0 @@ -Python Logging Module Handler -============================== - -.. automodule:: google.cloud.logging.handlers.handlers - :members: - :show-inheritance: diff --git a/logging/docs/index.rst b/logging/docs/index.rst deleted file mode 100644 index f617201a90ab..000000000000 --- a/logging/docs/index.rst +++ /dev/null @@ -1,19 +0,0 @@ -.. include:: README.rst - -Documentation -------------------- -.. toctree:: - :maxdepth: 3 - - v1 - v2 - -Changelog -~~~~~~~~~ - -For a list of all ``google-cloud-logging`` releases: - -.. toctree:: - :maxdepth: 2 - - changelog diff --git a/logging/docs/logger.rst b/logging/docs/logger.rst deleted file mode 100644 index 72533ba33774..000000000000 --- a/logging/docs/logger.rst +++ /dev/null @@ -1,6 +0,0 @@ -Logger -====== - -.. automodule:: google.cloud.logging.logger - :members: - :show-inheritance: diff --git a/logging/docs/metric.rst b/logging/docs/metric.rst deleted file mode 100644 index ca30e3c89eca..000000000000 --- a/logging/docs/metric.rst +++ /dev/null @@ -1,6 +0,0 @@ -Metrics -======= - -.. automodule:: google.cloud.logging.metric - :members: - :show-inheritance: diff --git a/logging/docs/sink.rst b/logging/docs/sink.rst deleted file mode 100644 index 35e88562bbee..000000000000 --- a/logging/docs/sink.rst +++ /dev/null @@ -1,6 +0,0 @@ -Sinks -===== - -.. automodule:: google.cloud.logging.sink - :members: - :show-inheritance: diff --git a/logging/docs/snippets.py b/logging/docs/snippets.py deleted file mode 100644 index 778327989b0f..000000000000 --- a/logging/docs/snippets.py +++ /dev/null @@ -1,434 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Testable usage examples for Stackdriver Logging API wrapper - -Each example function takes a ``client`` argument (which must be an instance -of :class:`google.cloud.logging.client.Client`) and uses it to perform a task -with the API. - -To facilitate running the examples as system tests, each example is also passed -a ``to_delete`` list; the function adds to the list any objects created which -need to be deleted during teardown. 
-""" - -import time - -from google.cloud.logging.client import Client - - -def snippet(func): - """Mark ``func`` as a snippet example function.""" - func._snippet = True - return func - - -def _millis(): - return time.time() * 1000 - - -def do_something_with(item): # pylint: disable=unused-argument - pass - - -# pylint: disable=reimported,unused-variable,unused-argument -@snippet -def instantiate_client(_unused_client, _unused_to_delete): - """Instantiate client.""" - - # [START client_create_default] - from google.cloud import logging - - client = logging.Client() - # [END client_create_default] - - credentials = object() - # [START client_create_explicit] - from google.cloud import logging - - client = logging.Client(project="my-project", credentials=credentials) - # [END client_create_explicit] - - -# pylint: enable=reimported,unused-variable,unused-argument - - -@snippet -def client_list_entries(client, to_delete): # pylint: disable=unused-argument - """List entries via client.""" - - # [START client_list_entries_default] - for entry in client.list_entries(): # API call(s) - do_something_with(entry) - # [END client_list_entries_default] - - # [START client_list_entries_filter] - FILTER = "logName:log_name AND textPayload:simple" - for entry in client.list_entries(filter_=FILTER): # API call(s) - do_something_with(entry) - # [END client_list_entries_filter] - - # [START client_list_entries_order_by] - from google.cloud.logging import DESCENDING - - for entry in client.list_entries(order_by=DESCENDING): # API call(s) - do_something_with(entry) - # [END client_list_entries_order_by] - - # [START client_list_entries_paged] - iterator = client.list_entries() - pages = iterator.pages - - page1 = next(pages) # API call - for entry in page1: - do_something_with(entry) - - page2 = next(pages) # API call - for entry in page2: - do_something_with(entry) - # [END client_list_entries_paged] - - -# @snippet Commented because we need real project IDs to test -def client_list_entries_multi_project( - client, to_delete -): # pylint: disable=unused-argument - """List entries via client across multiple projects.""" - - # [START client_list_entries_multi_project] - PROJECT_IDS = ["one-project", "another-project"] - for entry in client.list_entries(projects=PROJECT_IDS): # API call(s) - do_something_with(entry) - # [END client_list_entries_multi_project] - - -@snippet -def logger_usage(client, to_delete): - """Logger usage.""" - LOG_NAME = "logger_usage_%d" % (_millis()) - - # [START logger_create] - logger = client.logger(LOG_NAME) - # [END logger_create] - to_delete.append(logger) - - # [START logger_log_text] - logger.log_text("A simple entry") # API call - # [END logger_log_text] - - # [START logger_log_struct] - logger.log_struct( - {"message": "My second entry", "weather": "partly cloudy"} - ) # API call - # [END logger_log_struct] - - # [START logger_log_resource_text] - from google.cloud.logging.resource import Resource - - res = Resource( - type="generic_node", - labels={ - "location": "us-central1-a", - "namespace": "default", - "node_id": "10.10.10.1", - }, - ) - logger.log_struct( - {"message": "My first entry", "weather": "partly cloudy"}, resource=res - ) - # [END logger_log_resource_text] - - # [START logger_list_entries] - from google.cloud.logging import DESCENDING - - for entry in logger.list_entries(order_by=DESCENDING): # API call(s) - do_something_with(entry) - # [END logger_list_entries] - - def _logger_delete(): - # [START logger_delete] - logger.delete() # API call - # [END 
logger_delete] - - _backoff_not_found(_logger_delete) - to_delete.remove(logger) - - -@snippet -def metric_crud(client, to_delete): - """Metric CRUD.""" - METRIC_NAME = "robots-%d" % (_millis(),) - DESCRIPTION = "Robots all up in your server" - FILTER = "logName:apache-access AND textPayload:robot" - UPDATED_FILTER = "textPayload:robot" - UPDATED_DESCRIPTION = "Danger, Will Robinson!" - - # [START client_list_metrics] - for metric in client.list_metrics(): # API call(s) - do_something_with(metric) - # [END client_list_metrics] - - # [START metric_create] - metric = client.metric(METRIC_NAME, filter_=FILTER, description=DESCRIPTION) - assert not metric.exists() # API call - metric.create() # API call - assert metric.exists() # API call - # [END metric_create] - to_delete.append(metric) - - # [START metric_reload] - existing_metric = client.metric(METRIC_NAME) - existing_metric.reload() # API call - # [END metric_reload] - assert existing_metric.filter_ == FILTER - assert existing_metric.description == DESCRIPTION - - # [START metric_update] - existing_metric.filter_ = UPDATED_FILTER - existing_metric.description = UPDATED_DESCRIPTION - existing_metric.update() # API call - # [END metric_update] - existing_metric.reload() - assert existing_metric.filter_ == UPDATED_FILTER - assert existing_metric.description == UPDATED_DESCRIPTION - - def _metric_delete(): - # [START metric_delete] - metric.delete() - # [END metric_delete] - - _backoff_not_found(_metric_delete) - to_delete.remove(metric) - - -def _sink_storage_setup(client): - from google.cloud import storage - - BUCKET_NAME = "sink-storage-%d" % (_millis(),) - client = storage.Client() - bucket = client.bucket(BUCKET_NAME) - bucket.create() - - # [START sink_bucket_permissions] - bucket.acl.reload() # API call - logs_group = bucket.acl.group("cloud-logs@google.com") - logs_group.grant_owner() - bucket.acl.add_entity(logs_group) - bucket.acl.save() # API call - # [END sink_bucket_permissions] - - return bucket - - -@snippet -def sink_storage(client, to_delete): - """Sink log entries to storage.""" - bucket = _sink_storage_setup(client) - to_delete.append(bucket) - SINK_NAME = "robots-storage-%d" % (_millis(),) - FILTER = "textPayload:robot" - - # [START sink_storage_create] - DESTINATION = "storage.googleapis.com/%s" % (bucket.name,) - sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) - assert not sink.exists() # API call - sink.create() # API call - assert sink.exists() # API call - # [END sink_storage_create] - to_delete.insert(0, sink) # delete sink before bucket - - -def _sink_bigquery_setup(client): - from google.cloud import bigquery - - DATASET_NAME = "sink_bigquery_%d" % (_millis(),) - client = bigquery.Client() - dataset = client.dataset(DATASET_NAME) - dataset.create() - dataset.reload() - - # [START sink_dataset_permissions] - from google.cloud.bigquery.dataset import AccessGrant - - grants = dataset.access_grants - grants.append(AccessGrant("WRITER", "groupByEmail", "cloud-logs@google.com")) - dataset.access_grants = grants - dataset.update() # API call - # [END sink_dataset_permissions] - - return dataset - - -@snippet -def sink_bigquery(client, to_delete): - """Sink log entries to bigquery.""" - dataset = _sink_bigquery_setup(client) - to_delete.append(dataset) - SINK_NAME = "robots-bigquery-%d" % (_millis(),) - FILTER = "textPayload:robot" - - # [START sink_bigquery_create] - DESTINATION = "bigquery.googleapis.com%s" % (dataset.path,) - sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) 
- assert not sink.exists() # API call - sink.create() # API call - assert sink.exists() # API call - # [END sink_bigquery_create] - to_delete.insert(0, sink) # delete sink before dataset - - -def _sink_pubsub_setup(client): - from google.cloud import pubsub - - TOPIC_NAME = "sink-pubsub-%d" % (_millis(),) - client = pubsub.Client() - topic = client.topic(TOPIC_NAME) - topic.create() - - # [START sink_topic_permissions] - policy = topic.get_iam_policy() # API call - policy.owners.add(policy.group("cloud-logs@google.com")) - topic.set_iam_policy(policy) # API call - # [END sink_topic_permissions] - - return topic - - -@snippet -def sink_pubsub(client, to_delete): - """Sink log entries to pubsub.""" - topic = _sink_pubsub_setup(client) - to_delete.append(topic) - SINK_NAME = "robots-pubsub-%d" % (_millis(),) - FILTER = "logName:apache-access AND textPayload:robot" - UPDATED_FILTER = "textPayload:robot" - - # [START sink_pubsub_create] - DESTINATION = "pubsub.googleapis.com/%s" % (topic.full_name,) - sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) - assert not sink.exists() # API call - sink.create() # API call - assert sink.exists() # API call - # [END sink_pubsub_create] - to_delete.insert(0, sink) # delete sink before topic - - # [START client_list_sinks] - for sink in client.list_sinks(): # API call(s) - do_something_with(sink) - # [END client_list_sinks] - - # [START sink_reload] - existing_sink = client.sink(SINK_NAME) - existing_sink.reload() - # [END sink_reload] - assert existing_sink.filter_ == FILTER - assert existing_sink.destination == DESTINATION - - # [START sink_update] - existing_sink.filter_ = UPDATED_FILTER - existing_sink.update() - # [END sink_update] - existing_sink.reload() - assert existing_sink.filter_ == UPDATED_FILTER - - # [START sink_delete] - sink.delete() - # [END sink_delete] - to_delete.pop(0) - - -@snippet -def logging_handler(client): - # [START create_default_handler] - import logging - - handler = client.get_default_handler() - cloud_logger = logging.getLogger("cloudLogger") - cloud_logger.setLevel(logging.INFO) - cloud_logger.addHandler(handler) - cloud_logger.error("bad news") - # [END create_default_handler] - - # [START create_cloud_handler] - from google.cloud.logging.handlers import CloudLoggingHandler - - handler = CloudLoggingHandler(client) - cloud_logger = logging.getLogger("cloudLogger") - cloud_logger.setLevel(logging.INFO) - cloud_logger.addHandler(handler) - cloud_logger.error("bad news") - # [END create_cloud_handler] - - # [START create_named_handler] - handler = CloudLoggingHandler(client, name="mycustomlog") - # [END create_named_handler] - - -@snippet -def setup_logging(client): - import logging - - # [START setup_logging] - client.setup_logging(log_level=logging.INFO) - # [END setup_logging] - - # [START setup_logging_excludes] - client.setup_logging(log_level=logging.INFO, excluded_loggers=("werkzeug",)) - # [END setup_logging_excludes] - - -def _line_no(func): - return func.__code__.co_firstlineno - - -def _find_examples(): - funcs = [obj for obj in globals().values() if getattr(obj, "_snippet", False)] - for func in sorted(funcs, key=_line_no): - yield func - - -def _name_and_doc(func): - return func.__name__, func.__doc__ - - -def _backoff_not_found(deleter): - from google.cloud.exceptions import NotFound - - timeouts = [1, 2, 4, 8, 16] - while timeouts: - try: - deleter() - except NotFound: - time.sleep(timeouts.pop(0)) - else: - break - - -def main(): - client = Client() - for example in _find_examples(): - 
to_delete = [] - print("%-25s: %s" % _name_and_doc(example)) - try: - example(client, to_delete) - except AssertionError as failure: - print(" FAIL: %s" % (failure,)) - except Exception as error: # pylint: disable=broad-except - print(" ERROR: %r" % (error,)) - for item in to_delete: - _backoff_not_found(item.delete) - - -if __name__ == "__main__": - main() diff --git a/logging/docs/stdlib-usage.rst b/logging/docs/stdlib-usage.rst deleted file mode 100644 index cba4080b5f5e..000000000000 --- a/logging/docs/stdlib-usage.rst +++ /dev/null @@ -1,70 +0,0 @@ -Integration with Python logging module --------------------------------------- - - -It's possible to tie the Python :mod:`logging` module directly into Google Cloud Logging. To use it, -create a :class:`CloudLoggingHandler ` instance from your -Logging client. - -.. code-block:: python - - >>> import logging - >>> import google.cloud.logging # Don't conflict with standard logging - >>> from google.cloud.logging.handlers import CloudLoggingHandler - >>> client = google.cloud.logging.Client() - >>> handler = CloudLoggingHandler(client) - >>> cloud_logger = logging.getLogger('cloudLogger') - >>> cloud_logger.setLevel(logging.INFO) # defaults to WARN - >>> cloud_logger.addHandler(handler) - >>> cloud_logger.error('bad news') - -.. note:: - - This handler by default uses an asynchronous transport that sends log entries on a background - thread. However, the API call will still be made in the same process. For other transport - options, see the transports section. - -All logs will go to a single custom log, which defaults to "python". The name of the Python -logger will be included in the structured log entry under the "python_logger" field. You can -change it by providing a name to the handler: - -.. code-block:: python - - >>> handler = CloudLoggingHandler(client, name="mycustomlog") - -It is also possible to attach the handler to the root Python logger, so that for example a plain -`logging.warn` call would be sent to Cloud Logging, as well as any other loggers created. However, -you must avoid infinite recursion from the logging calls the client itself makes. A helper -method :meth:`setup_logging ` is provided to configure -this automatically: - -.. code-block:: python - - >>> import logging - >>> import google.cloud.logging # Don't conflict with standard logging - >>> from google.cloud.logging.handlers import CloudLoggingHandler, setup_logging - >>> client = google.cloud.logging.Client() - >>> handler = CloudLoggingHandler(client) - >>> logging.getLogger().setLevel(logging.INFO) # defaults to WARN - >>> setup_logging(handler) - >>> logging.error('bad news') - -You can also exclude certain loggers: - -.. code-block:: python - - >>> setup_logging(handler, excluded_loggers=('werkzeug',)) - - - -Python logging handler transports -================================== - -The Python logging handler can use different transports. The default is -:class:`google.cloud.logging.handlers.BackgroundThreadTransport`. - - 1. :class:`google.cloud.logging.handlers.BackgroundThreadTransport` this is the default. It writes - entries on a background :class:`python.threading.Thread`. - - 1. :class:`google.cloud.logging.handlers.SyncTransport` this handler does a direct API call on each - logging statement to write the entry. 
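When the synchronous behavior is what you want (for example in a short-lived script where no background thread should outlive the work), the transport can be passed to the handler constructor. The following sketch is illustrative only, not part of the original page, and assumes the ``transport`` keyword accepted by ``CloudLoggingHandler`` in the 1.x handlers package:

.. code-block:: python

    import logging

    import google.cloud.logging  # avoid clashing with the standard library module
    from google.cloud.logging.handlers import CloudLoggingHandler
    from google.cloud.logging.handlers.transports import SyncTransport

    client = google.cloud.logging.Client()

    # SyncTransport writes each record with a blocking API call instead of
    # queueing it for the background thread used by the default transport.
    handler = CloudLoggingHandler(client, name="mycustomlog", transport=SyncTransport)

    cloud_logger = logging.getLogger("cloudLogger")
    cloud_logger.setLevel(logging.INFO)
    cloud_logger.addHandler(handler)
    cloud_logger.error("bad news")  # one API call per record

For latency-sensitive request paths the default ``BackgroundThreadTransport`` is usually the better choice.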
diff --git a/logging/docs/transports-base.rst b/logging/docs/transports-base.rst deleted file mode 100644 index 5b52c46cadcb..000000000000 --- a/logging/docs/transports-base.rst +++ /dev/null @@ -1,6 +0,0 @@ -Python Logging Handler Sync Transport -====================================== - -.. automodule:: google.cloud.logging.handlers.transports.base - :members: - :show-inheritance: diff --git a/logging/docs/transports-sync.rst b/logging/docs/transports-sync.rst deleted file mode 100644 index edb2b72f578d..000000000000 --- a/logging/docs/transports-sync.rst +++ /dev/null @@ -1,6 +0,0 @@ -Python Logging Handler Sync Transport -====================================== - -.. automodule:: google.cloud.logging.handlers.transports.sync - :members: - :show-inheritance: diff --git a/logging/docs/transports-thread.rst b/logging/docs/transports-thread.rst deleted file mode 100644 index 45780b27fe42..000000000000 --- a/logging/docs/transports-thread.rst +++ /dev/null @@ -1,7 +0,0 @@ -Python Logging Handler Threaded Transport -========================================= - - -.. automodule:: google.cloud.logging.handlers.transports.background_thread - :members: - :show-inheritance: diff --git a/logging/docs/usage.rst b/logging/docs/usage.rst deleted file mode 100644 index f5662bcbaa08..000000000000 --- a/logging/docs/usage.rst +++ /dev/null @@ -1,361 +0,0 @@ -Usage Guide -=========== - -Writing log entries -------------------- - -To write log entries, first create a -:class:`~google.cloud.logging.logger.Logger`, passing the "log name" with -which to associate the entries: - -.. literalinclude:: snippets.py - :start-after: [START logger_create] - :end-before: [END logger_create] - :dedent: 4 - -Write a simple text entry to the logger. - -.. literalinclude:: snippets.py - :start-after: [START logger_log_text] - :end-before: [END logger_log_text] - :dedent: 4 - -Write a dictionary entry to the logger. - -.. literalinclude:: snippets.py - :start-after: [START logger_log_struct] - :end-before: [END logger_log_struct] - :dedent: 4 - -Write a simple text entry and resource to the logger. - -Supported Resource values are listed at `Monitored Resource Types`_ - -.. _Monitored Resource Types: https://cloud.google.com/logging/docs/api/v2/resource-list - - -.. literalinclude:: snippets.py - :start-after: [START logger_log_resource_text] - :end-before: [END logger_log_resource_text] - :dedent: 4 - -Retrieving log entries ----------------------- - -Fetch entries for the default project. - -.. literalinclude:: snippets.py - :start-after: [START client_list_entries_default] - :end-before: [END client_list_entries_default] - :dedent: 4 - -Entries returned by -:meth:`Client.list_entries ` -or -:meth:`Logger.list_entries ` -will be instances of one of the following classes: - -- :class:`~google.cloud.logging.entries.TextEntry` -- :class:`~google.cloud.logging.entries.StructEntry` -- :class:`~google.cloud.logging.entries.ProtobufEntry` - -Fetch entries across multiple projects. - -.. literalinclude:: snippets.py - :start-after: [START client_list_entries_multi_project] - :end-before: [END client_list_entries_multi_project] - :dedent: 4 - -Filter entries retrieved using the `Advanced Logs Filters`_ syntax - -.. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters - -Fetch entries for the default project. - -.. literalinclude:: snippets.py - :start-after: [START client_list_entries_filter] - :end-before: [END client_list_entries_filter] - :dedent: 4 - -Sort entries in descending timestamp order. 
- -.. literalinclude:: snippets.py - :start-after: [START client_list_entries_order_by] - :end-before: [END client_list_entries_order_by] - :dedent: 4 - -Retrieve entries in batches of 10, iterating until done. - -.. literalinclude:: snippets.py - :start-after: [START client_list_entries_paged] - :end-before: [END client_list_entries_paged] - :dedent: 4 - -Retrieve entries for a single logger, sorting in descending timestamp order: - -.. literalinclude:: snippets.py - :start-after: [START logger_list_entries] - :end-before: [END logger_list_entries] - :dedent: 4 - - -Delete all entries for a logger -------------------------------- - -.. literalinclude:: snippets.py - :start-after: [START logger_delete] - :end-before: [END logger_delete] - :dedent: 8 - - -Manage log metrics ------------------- - -Metrics are counters of entries which match a given filter. They can be -used within Stackdriver Monitoring to create charts and alerts. - -List all metrics for a project: - -.. literalinclude:: snippets.py - :start-after: [START client_list_metrics] - :end-before: [END client_list_metrics] - :dedent: 4 - -Create a metric: - -.. literalinclude:: snippets.py - :start-after: [START metric_create] - :end-before: [END metric_create] - :dedent: 4 - -Refresh local information about a metric: - -.. literalinclude:: snippets.py - :start-after: [START metric_reload] - :end-before: [END metric_reload] - :dedent: 4 - -Update a metric: - -.. literalinclude:: snippets.py - :start-after: [START metric_update] - :end-before: [END metric_update] - :dedent: 4 - -Delete a metric: - -.. literalinclude:: snippets.py - :start-after: [START metric_delete] - :end-before: [END metric_delete] - :dedent: 4 - -Export log entries using sinks ------------------------------- - -Sinks allow exporting entries which match a given filter to Cloud Storage -buckets, BigQuery datasets, or Cloud Pub/Sub topics. - -Export to Cloud Storage -~~~~~~~~~~~~~~~~~~~~~~~ - -Make sure that the storage bucket you want to export logs too has -``cloud-logs@google.com`` as the owner. See -`Setting permissions for Cloud Storage`_. - -.. _Setting permissions for Cloud Storage: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_cloud_storage - -Add ``cloud-logs@google.com`` as the owner of the bucket: - -.. literalinclude:: snippets.py - :start-after: [START sink_bucket_permissions] - :end-before: [END sink_bucket_permissions] - :dedent: 4 - -Create a Cloud Storage sink: - -.. literalinclude:: snippets.py - :start-after: [START sink_storage_create] - :end-before: [END sink_storage_create] - :dedent: 4 - - -Export to BigQuery -~~~~~~~~~~~~~~~~~~ - -To export logs to BigQuery you must log into the Cloud Platform Console -and add ``cloud-logs@google.com`` to a dataset. - -See: `Setting permissions for BigQuery`_ - -.. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_bigquery - -.. literalinclude:: snippets.py - :start-after: [START sink_dataset_permissions] - :end-before: [END sink_dataset_permissions] - :dedent: 4 - -Create a BigQuery sink: - -.. literalinclude:: snippets.py - :start-after: [START sink_bigquery_create] - :end-before: [END sink_bigquery_create] - :dedent: 4 - - -Export to Pub/Sub -~~~~~~~~~~~~~~~~~ - -To export logs to BigQuery you must log into the Cloud Platform Console -and add ``cloud-logs@google.com`` to a topic. - -See: `Setting permissions for Pub/Sub`_ - -.. 
_Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_logs_to_cloud_pubsub - -.. literalinclude:: snippets.py - :start-after: [START sink_topic_permissions] - :end-before: [END sink_topic_permissions] - :dedent: 4 - -Create a Cloud Pub/Sub sink: - -.. literalinclude:: snippets.py - :start-after: [START sink_pubsub_create] - :end-before: [END sink_pubsub_create] - :dedent: 4 - -Manage Sinks -~~~~~~~~~~~~ - -List all sinks for a project: - -.. literalinclude:: snippets.py - :start-after: [START client_list_sinks] - :end-before: [END client_list_sinks] - :dedent: 4 - -Refresh local information about a sink: - -.. literalinclude:: snippets.py - :start-after: [START sink_reload] - :end-before: [END sink_reload] - :dedent: 4 - -Update a sink: - -.. literalinclude:: snippets.py - :start-after: [START sink_update] - :end-before: [END sink_update] - :dedent: 4 - -Delete a sink: - -.. literalinclude:: snippets.py - :start-after: [START sink_delete] - :end-before: [END sink_delete] - :dedent: 4 - -Integration with Python logging module --------------------------------------- - -It's possible to tie the Python :mod:`logging` module directly into Google -Stackdriver Logging. There are different handler options to accomplish this. -To automatically pick the default for your current environment, use -:meth:`~google.cloud.logging.client.Client.get_default_handler`. - -.. literalinclude:: snippets.py - :start-after: [START create_default_handler] - :end-before: [END create_default_handler] - :dedent: 4 - -It is also possible to attach the handler to the root Python logger, so that -for example a plain ``logging.warn`` call would be sent to Stackdriver Logging, -as well as any other loggers created. A helper method -:meth:`~google.cloud.logging.client.Client.setup_logging` is provided -to configure this automatically. - -.. literalinclude:: snippets.py - :start-after: [START setup_logging] - :end-before: [END setup_logging] - :dedent: 4 - -.. note:: - - To reduce cost and quota usage, do not enable Stackdriver logging - handlers while testing locally. - -You can also exclude certain loggers: - -.. literalinclude:: snippets.py - :start-after: [START setup_logging_excludes] - :end-before: [END setup_logging_excludes] - :dedent: 4 - -Cloud Logging Handler -~~~~~~~~~~~~~~~~~~~~~ - -If you prefer not to use -:meth:`~google.cloud.logging.client.Client.get_default_handler`, you can -directly create a -:class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` instance -which will write directly to the API. - -.. literalinclude:: snippets.py - :start-after: [START create_cloud_handler] - :end-before: [END create_cloud_handler] - :dedent: 4 - -.. note:: - - This handler by default uses an asynchronous transport that sends log - entries on a background thread. However, the API call will still be made - in the same process. For other transport options, see the transports - section. - -All logs will go to a single custom log, which defaults to "python". The name -of the Python logger will be included in the structured log entry under the -"python_logger" field. You can change it by providing a name to the handler: - -.. literalinclude:: snippets.py - :start-after: [START create_named_handler] - :end-before: [END create_named_handler] - :dedent: 4 - -Cloud Logging Handler transports -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` -logging handler can use different transports. 
The default is -:class:`~google.cloud.logging.handlers.BackgroundThreadTransport`. - - 1. :class:`~google.cloud.logging.handlers.BackgroundThreadTransport` this is - the default. It writes entries on a background - :class:`python.threading.Thread`. - - 1. :class:`~google.cloud.logging.handlers.SyncTransport` this handler does a - direct API call on each logging statement to write the entry. - - -.. _Google Container Engine: https://cloud.google.com/container-engine/ - -fluentd logging handlers -~~~~~~~~~~~~~~~~~~~~~~~~ - -Besides :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler`, -which writes directly to the API, two other handlers are provided. -:class:`~google.cloud.logging.handlers.app_engine.AppEngineHandler`, which is -recommended when running on the Google App Engine Flexible vanilla runtimes -(i.e. your app.yaml contains ``runtime: python``), and -:class:`~google.cloud.logging.handlers.container_engine.ContainerEngineHandler` -, which is recommended when running on `Google Container Engine`_ with the -Stackdriver Logging plugin enabled. - -:meth:`~google.cloud.logging.client.Client.get_default_handler` and -:meth:`~google.cloud.logging.client.Client.setup_logging` will attempt to use -the environment to automatically detect whether the code is running in -these platforms and use the appropriate handler. - -In both cases, the fluentd agent is configured to automatically parse log files -in an expected format and forward them to Stackdriver logging. The handlers -provided help set the correct metadata such as log level so that logs can be -filtered accordingly. diff --git a/logging/docs/v1.rst b/logging/docs/v1.rst deleted file mode 100644 index f4f79d377a65..000000000000 --- a/logging/docs/v1.rst +++ /dev/null @@ -1,18 +0,0 @@ -v1 -============== -.. toctree:: - :maxdepth: 2 - - usage - client - logger - entries - metric - sink - stdlib-usage - handlers - handlers-app-engine - handlers-container-engine - transports-sync - transports-thread - transports-base \ No newline at end of file diff --git a/logging/docs/v2.rst b/logging/docs/v2.rst deleted file mode 100644 index 8dfc18b48171..000000000000 --- a/logging/docs/v2.rst +++ /dev/null @@ -1,7 +0,0 @@ -v2 ----------------- -.. toctree:: - :maxdepth: 2 - - gapic/v2/api - gapic/v2/types \ No newline at end of file diff --git a/logging/google/__init__.py b/logging/google/__init__.py deleted file mode 100644 index 0e1bc5131ba6..000000000000 --- a/logging/google/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/logging/google/cloud/__init__.py b/logging/google/cloud/__init__.py deleted file mode 100644 index 0e1bc5131ba6..000000000000 --- a/logging/google/cloud/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/logging/google/cloud/logging/__init__.py b/logging/google/cloud/logging/__init__.py deleted file mode 100644 index 80de6c4b6113..000000000000 --- a/logging/google/cloud/logging/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Google Stackdriver Logging API wrapper.""" - - -from pkg_resources import get_distribution - -__version__ = get_distribution("google-cloud-logging").version - -from google.cloud.logging.client import Client - - -ASCENDING = "timestamp asc" -"""Query string to order by ascending timestamps.""" -DESCENDING = "timestamp desc" -"""Query string to order by decending timestamps.""" - -__all__ = ["__version__", "ASCENDING", "Client", "DESCENDING"] diff --git a/logging/google/cloud/logging/_gapic.py b/logging/google/cloud/logging/_gapic.py deleted file mode 100644 index 32897c088142..000000000000 --- a/logging/google/cloud/logging/_gapic.py +++ /dev/null @@ -1,574 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Wrapper for adapting the autogenerated gapic client to the hand-written -client.""" - -import functools - -from google.cloud.logging_v2.gapic.config_service_v2_client import ConfigServiceV2Client -from google.cloud.logging_v2.gapic.logging_service_v2_client import ( - LoggingServiceV2Client, -) -from google.cloud.logging_v2.gapic.metrics_service_v2_client import ( - MetricsServiceV2Client, -) -from google.cloud.logging_v2.proto.logging_config_pb2 import LogSink -from google.cloud.logging_v2.proto.logging_metrics_pb2 import LogMetric -from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry -from google.protobuf.json_format import MessageToDict -from google.protobuf.json_format import ParseDict - -from google.cloud.logging._helpers import entry_from_resource -from google.cloud.logging.sink import Sink -from google.cloud.logging.metric import Metric - - -class _LoggingAPI(object): - """Helper mapping logging-related APIs. - - :type gapic_api: - :class:`.logging_service_v2_client.LoggingServiceV2Client` - :param gapic_api: API object used to make RPCs. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client that owns this API object. - """ - - def __init__(self, gapic_api, client): - self._gapic_api = gapic_api - self._client = client - - def list_entries( - self, projects, filter_="", order_by="", page_size=0, page_token=None - ): - """Return a page of log entry resources. - - :type projects: list of strings - :param projects: project IDs to include. If not passed, - defaults to the project bound to the API's client. - - :type filter_: str - :param filter_: - a filter expression. See - https://cloud.google.com/logging/docs/view/advanced_filters - - :type order_by: str - :param order_by: One of :data:`~google.cloud.logging.ASCENDING` - or :data:`~google.cloud.logging.DESCENDING`. - - :type page_size: int - :param page_size: maximum number of entries to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` - accessible to the current API. - """ - page_iter = self._gapic_api.list_log_entries( - [], - project_ids=projects, - filter_=filter_, - order_by=order_by, - page_size=page_size, - ) - page_iter.client = self._client - page_iter.next_page_token = page_token - - # We attach a mutable loggers dictionary so that as Logger - # objects are created by entry_from_resource, they can be - # re-used by other log entries from the same logger. - loggers = {} - page_iter.item_to_value = functools.partial(_item_to_entry, loggers=loggers) - return page_iter - - def write_entries(self, entries, logger_name=None, resource=None, labels=None): - """API call: log an entry resource via a POST request - - :type entries: sequence of mapping - :param entries: the log entry resources to log. - - :type logger_name: str - :param logger_name: name of default logger to which to log the entries; - individual entries may override. - - :type resource: mapping - :param resource: default resource to associate with entries; - individual entries may override. - - :type labels: mapping - :param labels: default labels to associate with entries; - individual entries may override. 
- """ - partial_success = False - entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries] - self._gapic_api.write_log_entries( - entry_pbs, - log_name=logger_name, - resource=resource, - labels=labels, - partial_success=partial_success, - ) - - def logger_delete(self, project, logger_name): - """API call: delete all entries in a logger via a DELETE request - - :type project: str - :param project: ID of project containing the log entries to delete - - :type logger_name: str - :param logger_name: name of logger containing the log entries to delete - """ - path = "projects/%s/logs/%s" % (project, logger_name) - self._gapic_api.delete_log(path) - - -class _SinksAPI(object): - """Helper mapping sink-related APIs. - - :type gapic_api: - :class:`.config_service_v2_client.ConfigServiceV2Client` - :param gapic_api: API object used to make RPCs. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client that owns this API object. - """ - - def __init__(self, gapic_api, client): - self._gapic_api = gapic_api - self._client = client - - def list_sinks(self, project, page_size=0, page_token=None): - """List sinks for the project associated with this client. - - :type project: str - :param project: ID of the project whose sinks are to be listed. - - :type page_size: int - :param page_size: maximum number of sinks to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of sinks. If not - passed, the API will return the first page of - sinks. - - :rtype: tuple, (list, str) - :returns: list of mappings, plus a "next page token" string: - if not None, indicates that more sinks can be retrieved - with another call (pass that value as ``page_token``). - """ - path = "projects/%s" % (project,) - page_iter = self._gapic_api.list_sinks(path, page_size=page_size) - page_iter.client = self._client - page_iter.next_page_token = page_token - page_iter.item_to_value = _item_to_sink - return page_iter - - def sink_create( - self, project, sink_name, filter_, destination, unique_writer_identity=False - ): - """API call: create a sink resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create - - :type project: str - :param project: ID of the project in which to create the sink. - - :type sink_name: str - :param sink_name: the name of the sink - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the sink. - - :type destination: str - :param destination: destination URI for the entries exported by - the sink. - - :type unique_writer_identity: bool - :param unique_writer_identity: (Optional) determines the kind of - IAM identity returned as - writer_identity in the new sink. - - :rtype: dict - :returns: The sink resource returned from the API (converted from a - protobuf to a dictionary). - """ - parent = "projects/%s" % (project,) - sink_pb = LogSink(name=sink_name, filter=filter_, destination=destination) - created_pb = self._gapic_api.create_sink( - parent, sink_pb, unique_writer_identity=unique_writer_identity - ) - return MessageToDict(created_pb) - - def sink_get(self, project, sink_name): - """API call: retrieve a sink resource. - - :type project: str - :param project: ID of the project containing the sink. - - :type sink_name: str - :param sink_name: the name of the sink - - :rtype: dict - :returns: The sink object returned from the API (converted from a - protobuf to a dictionary). 
- """ - path = "projects/%s/sinks/%s" % (project, sink_name) - sink_pb = self._gapic_api.get_sink(path) - # NOTE: LogSink message type does not have an ``Any`` field - # so `MessageToDict`` can safely be used. - return MessageToDict(sink_pb) - - def sink_update( - self, project, sink_name, filter_, destination, unique_writer_identity=False - ): - """API call: update a sink resource. - - :type project: str - :param project: ID of the project containing the sink. - - :type sink_name: str - :param sink_name: the name of the sink - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the sink. - - :type destination: str - :param destination: destination URI for the entries exported by - the sink. - - :type unique_writer_identity: bool - :param unique_writer_identity: (Optional) determines the kind of - IAM identity returned as - writer_identity in the new sink. - - :rtype: dict - :returns: The sink resource returned from the API (converted from a - protobuf to a dictionary). - """ - path = "projects/%s/sinks/%s" % (project, sink_name) - sink_pb = LogSink(name=path, filter=filter_, destination=destination) - sink_pb = self._gapic_api.update_sink( - path, sink_pb, unique_writer_identity=unique_writer_identity - ) - # NOTE: LogSink message type does not have an ``Any`` field - # so `MessageToDict`` can safely be used. - return MessageToDict(sink_pb) - - def sink_delete(self, project, sink_name): - """API call: delete a sink resource. - - :type project: str - :param project: ID of the project containing the sink. - - :type sink_name: str - :param sink_name: the name of the sink - """ - path = "projects/%s/sinks/%s" % (project, sink_name) - self._gapic_api.delete_sink(path) - - -class _MetricsAPI(object): - """Helper mapping sink-related APIs. - - :type gapic_api: - :class:`.metrics_service_v2_client.MetricsServiceV2Client` - - :param gapic_api: API object used to make RPCs. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client that owns this API object. - """ - - def __init__(self, gapic_api, client): - self._gapic_api = gapic_api - self._client = client - - def list_metrics(self, project, page_size=0, page_token=None): - """List metrics for the project associated with this client. - - :type project: str - :param project: ID of the project whose metrics are to be listed. - - :type page_size: int - :param page_size: maximum number of metrics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of metrics. If not - passed, the API will return the first page of - metrics. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.logging.metric.Metric` - accessible to the current API. - """ - path = "projects/%s" % (project,) - page_iter = self._gapic_api.list_log_metrics(path, page_size=page_size) - page_iter.client = self._client - page_iter.next_page_token = page_token - page_iter.item_to_value = _item_to_metric - return page_iter - - def metric_create(self, project, metric_name, filter_, description): - """API call: create a metric resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create - - :type project: str - :param project: ID of the project in which to create the metric. 
- - :type metric_name: str - :param metric_name: the name of the metric - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the metric. - - :type description: str - :param description: description of the metric. - """ - parent = "projects/%s" % (project,) - metric_pb = LogMetric(name=metric_name, filter=filter_, description=description) - self._gapic_api.create_log_metric(parent, metric_pb) - - def metric_get(self, project, metric_name): - """API call: retrieve a metric resource. - - :type project: str - :param project: ID of the project containing the metric. - - :type metric_name: str - :param metric_name: the name of the metric - - :rtype: dict - :returns: The metric object returned from the API (converted from a - protobuf to a dictionary). - """ - path = "projects/%s/metrics/%s" % (project, metric_name) - metric_pb = self._gapic_api.get_log_metric(path) - # NOTE: LogMetric message type does not have an ``Any`` field - # so `MessageToDict`` can safely be used. - return MessageToDict(metric_pb) - - def metric_update(self, project, metric_name, filter_, description): - """API call: update a metric resource. - - :type project: str - :param project: ID of the project containing the metric. - - :type metric_name: str - :param metric_name: the name of the metric - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the metric. - - :type description: str - :param description: description of the metric. - - :rtype: dict - :returns: The metric object returned from the API (converted from a - protobuf to a dictionary). - """ - path = "projects/%s/metrics/%s" % (project, metric_name) - metric_pb = LogMetric(name=path, filter=filter_, description=description) - metric_pb = self._gapic_api.update_log_metric(path, metric_pb) - # NOTE: LogMetric message type does not have an ``Any`` field - # so `MessageToDict`` can safely be used. - return MessageToDict(metric_pb) - - def metric_delete(self, project, metric_name): - """API call: delete a metric resource. - - :type project: str - :param project: ID of the project containing the metric. - - :type metric_name: str - :param metric_name: the name of the metric - """ - path = "projects/%s/metrics/%s" % (project, metric_name) - self._gapic_api.delete_log_metric(path) - - -def _parse_log_entry(entry_pb): - """Special helper to parse ``LogEntry`` protobuf into a dictionary. - - The ``proto_payload`` field in ``LogEntry`` is of type ``Any``. This - can be problematic if the type URL in the payload isn't in the - ``google.protobuf`` registry. To help with parsing unregistered types, - this function will remove ``proto_payload`` before parsing. - - :type entry_pb: :class:`.log_entry_pb2.LogEntry` - :param entry_pb: Log entry protobuf. - - :rtype: dict - :returns: The parsed log entry. The ``protoPayload`` key may contain - the raw ``Any`` protobuf from ``entry_pb.proto_payload`` if - it could not be parsed. - """ - try: - return MessageToDict(entry_pb) - except TypeError: - if entry_pb.HasField("proto_payload"): - proto_payload = entry_pb.proto_payload - entry_pb.ClearField("proto_payload") - entry_mapping = MessageToDict(entry_pb) - entry_mapping["protoPayload"] = proto_payload - return entry_mapping - else: - raise - - -def _log_entry_mapping_to_pb(mapping): - """Helper for :meth:`write_entries`, et aliae - - Performs "impedance matching" between the protobuf attrs and - the keys expected in the JSON API. 
- """ - entry_pb = LogEntry() - # NOTE: We assume ``mapping`` was created in ``Batch.commit`` - # or ``Logger._make_entry_resource``. In either case, if - # the ``protoPayload`` key is present, we assume that the - # type URL is registered with ``google.protobuf`` and will - # not cause any issues in the JSON->protobuf conversion - # of the corresponding ``proto_payload`` in the log entry - # (it is an ``Any`` field). - ParseDict(mapping, entry_pb) - return entry_pb - - -def _item_to_entry(iterator, entry_pb, loggers): - """Convert a log entry protobuf to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.api_core.page_iterator.Iterator`. It is intended to be - patched with a mutable ``loggers`` argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_LoggingAPI.list_entries`. - - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type entry_pb: :class:`.log_entry_pb2.LogEntry` - :param entry_pb: Log entry protobuf returned from the API. - - :type loggers: dict - :param loggers: - A mapping of logger fullnames -> loggers. If the logger - that owns the entry is not in ``loggers``, the entry - will have a newly-created logger. - - :rtype: :class:`~google.cloud.logging.entries._BaseEntry` - :returns: The next log entry in the page. - """ - resource = _parse_log_entry(entry_pb) - return entry_from_resource(resource, iterator.client, loggers) - - -def _item_to_sink(iterator, log_sink_pb): - """Convert a sink protobuf to the native object. - - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type log_sink_pb: - :class:`.logging_config_pb2.LogSink` - :param log_sink_pb: Sink protobuf returned from the API. - - :rtype: :class:`~google.cloud.logging.sink.Sink` - :returns: The next sink in the page. - """ - # NOTE: LogSink message type does not have an ``Any`` field - # so `MessageToDict`` can safely be used. - resource = MessageToDict(log_sink_pb) - return Sink.from_api_repr(resource, iterator.client) - - -def _item_to_metric(iterator, log_metric_pb): - """Convert a metric protobuf to the native object. - - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type log_metric_pb: - :class:`.logging_metrics_pb2.LogMetric` - :param log_metric_pb: Metric protobuf returned from the API. - - :rtype: :class:`~google.cloud.logging.metric.Metric` - :returns: The next metric in the page. - """ - # NOTE: LogMetric message type does not have an ``Any`` field - # so `MessageToDict`` can safely be used. - resource = MessageToDict(log_metric_pb) - return Metric.from_api_repr(resource, iterator.client) - - -def make_logging_api(client): - """Create an instance of the Logging API adapter. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client that holds configuration details. - - :rtype: :class:`_LoggingAPI` - :returns: A metrics API instance with the proper credentials. - """ - generated = LoggingServiceV2Client( - credentials=client._credentials, client_info=client._client_info - ) - return _LoggingAPI(generated, client) - - -def make_metrics_api(client): - """Create an instance of the Metrics API adapter. 
- - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client that holds configuration details. - - :rtype: :class:`_MetricsAPI` - :returns: A metrics API instance with the proper credentials. - """ - generated = MetricsServiceV2Client( - credentials=client._credentials, client_info=client._client_info - ) - return _MetricsAPI(generated, client) - - -def make_sinks_api(client): - """Create an instance of the Sinks API adapter. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client that holds configuration details. - - :rtype: :class:`_SinksAPI` - :returns: A metrics API instance with the proper credentials. - """ - generated = ConfigServiceV2Client( - credentials=client._credentials, client_info=client._client_info - ) - return _SinksAPI(generated, client) diff --git a/logging/google/cloud/logging/_helpers.py b/logging/google/cloud/logging/_helpers.py deleted file mode 100644 index 4df8b12736bc..000000000000 --- a/logging/google/cloud/logging/_helpers.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Common logging helpers.""" - -import logging - -import requests - -from google.cloud.logging.entries import LogEntry -from google.cloud.logging.entries import ProtobufEntry -from google.cloud.logging.entries import StructEntry -from google.cloud.logging.entries import TextEntry - -try: - from google.cloud.logging_v2.gapic.enums import LogSeverity -except ImportError: # pragma: NO COVER - - class LogSeverity(object): - """Map severities for non-GAPIC usage.""" - - DEFAULT = 0 - DEBUG = 100 - INFO = 200 - NOTICE = 300 - WARNING = 400 - ERROR = 500 - CRITICAL = 600 - ALERT = 700 - EMERGENCY = 800 - - -_NORMALIZED_SEVERITIES = { - logging.CRITICAL: LogSeverity.CRITICAL, - logging.ERROR: LogSeverity.ERROR, - logging.WARNING: LogSeverity.WARNING, - logging.INFO: LogSeverity.INFO, - logging.DEBUG: LogSeverity.DEBUG, - logging.NOTSET: LogSeverity.DEFAULT, -} - -METADATA_URL = "http://metadata.google.internal./computeMetadata/v1/" -METADATA_HEADERS = {"Metadata-Flavor": "Google"} - - -def entry_from_resource(resource, client, loggers): - """Detect correct entry type from resource and instantiate. - - :type resource: dict - :param resource: One entry resource from API response. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: Client that owns the log entry. - - :type loggers: dict - :param loggers: - A mapping of logger fullnames -> loggers. If the logger - that owns the entry is not in ``loggers``, the entry - will have a newly-created logger. 
- - :rtype: :class:`~google.cloud.logging.entries._BaseEntry` - :returns: The entry instance, constructed via the resource - """ - if "textPayload" in resource: - return TextEntry.from_api_repr(resource, client, loggers) - - if "jsonPayload" in resource: - return StructEntry.from_api_repr(resource, client, loggers) - - if "protoPayload" in resource: - return ProtobufEntry.from_api_repr(resource, client, loggers) - - return LogEntry.from_api_repr(resource, client, loggers) - - -def retrieve_metadata_server(metadata_key): - """Retrieve the metadata key in the metadata server. - - See: https://cloud.google.com/compute/docs/storing-retrieving-metadata - - :type metadata_key: str - :param metadata_key: Key of the metadata which will form the url. You can - also supply query parameters after the metadata key. - e.g. "tags?alt=json" - - :rtype: str - :returns: The value of the metadata key returned by the metadata server. - """ - url = METADATA_URL + metadata_key - - try: - response = requests.get(url, headers=METADATA_HEADERS) - - if response.status_code == requests.codes.ok: - return response.text - - except requests.exceptions.RequestException: - # Ignore the exception, connection failed means the attribute does not - # exist in the metadata server. - pass - - return None - - -def _normalize_severity(stdlib_level): - """Normalize a Python stdlib severity to LogSeverity enum. - - :type stdlib_level: int - :param stdlib_level: 'levelno' from a :class:`logging.LogRecord` - - :rtype: int - :returns: Corresponding Stackdriver severity. - """ - return _NORMALIZED_SEVERITIES.get(stdlib_level, stdlib_level) diff --git a/logging/google/cloud/logging/_http.py b/logging/google/cloud/logging/_http.py deleted file mode 100644 index deb6b394f49d..000000000000 --- a/logging/google/cloud/logging/_http.py +++ /dev/null @@ -1,540 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Interact with Stackdriver Logging via JSON-over-HTTP.""" - -import functools - -from google.api_core import page_iterator -from google.cloud import _http - -from google.cloud.logging import __version__ -from google.cloud.logging._helpers import entry_from_resource -from google.cloud.logging.sink import Sink -from google.cloud.logging.metric import Metric - - -class Connection(_http.JSONConnection): - """A connection to Google Stackdriver Logging via the JSON REST API. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client that owns the current connection. - - :type client_info: :class:`~google.api_core.client_info.ClientInfo` - :param client_info: (Optional) instance used to generate user agent. - - :type client_options: :class:`~google.api_core.client_options.ClientOptions` - :param client_options (Optional) Client options used to set user options - on the client. API Endpoint should be set through client_options. 
- """ - - DEFAULT_API_ENDPOINT = "https://logging.googleapis.com" - - def __init__(self, client, client_info=None, api_endpoint=DEFAULT_API_ENDPOINT): - super(Connection, self).__init__(client, client_info) - self.API_BASE_URL = api_endpoint - self._client_info.gapic_version = __version__ - self._client_info.client_library_version = __version__ - - API_VERSION = "v2" - """The version of the API, used in building the API call's URL.""" - - API_URL_TEMPLATE = "{api_base_url}/{api_version}{path}" - """A template for the URL of a particular API call.""" - - -class _LoggingAPI(object): - """Helper mapping logging-related APIs. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client used to make API requests. - """ - - def __init__(self, client): - self._client = client - self.api_request = client._connection.api_request - - def list_entries( - self, projects, filter_=None, order_by=None, page_size=None, page_token=None - ): - """Return a page of log entry resources. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list - - :type projects: list of strings - :param projects: project IDs to include. If not passed, - defaults to the project bound to the client. - - :type filter_: str - :param filter_: - a filter expression. See - https://cloud.google.com/logging/docs/view/advanced_filters - - :type order_by: str - :param order_by: One of :data:`~google.cloud.logging.ASCENDING` - or :data:`~google.cloud.logging.DESCENDING`. - - :type page_size: int - :param page_size: maximum number of entries to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` - accessible to the current API. - """ - extra_params = {"projectIds": projects} - - if filter_ is not None: - extra_params["filter"] = filter_ - - if order_by is not None: - extra_params["orderBy"] = order_by - - if page_size is not None: - extra_params["pageSize"] = page_size - - path = "/entries:list" - # We attach a mutable loggers dictionary so that as Logger - # objects are created by entry_from_resource, they can be - # re-used by other log entries from the same logger. - loggers = {} - item_to_value = functools.partial(_item_to_entry, loggers=loggers) - iterator = page_iterator.HTTPIterator( - client=self._client, - api_request=self._client._connection.api_request, - path=path, - item_to_value=item_to_value, - items_key="entries", - page_token=page_token, - extra_params=extra_params, - ) - # This method uses POST to make a read-only request. - iterator._HTTP_METHOD = "POST" - return iterator - - def write_entries(self, entries, logger_name=None, resource=None, labels=None): - """API call: log an entry resource via a POST request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write - - :type entries: sequence of mapping - :param entries: the log entry resources to log. - - :type logger_name: str - :param logger_name: name of default logger to which to log the entries; - individual entries may override. 
- - :type resource: mapping - :param resource: default resource to associate with entries; - individual entries may override. - - :type labels: mapping - :param labels: default labels to associate with entries; - individual entries may override. - """ - data = {"entries": list(entries)} - - if logger_name is not None: - data["logName"] = logger_name - - if resource is not None: - data["resource"] = resource - - if labels is not None: - data["labels"] = labels - - self.api_request(method="POST", path="/entries:write", data=data) - - def logger_delete(self, project, logger_name): - """API call: delete all entries in a logger via a DELETE request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs/delete - - :type project: str - :param project: ID of project containing the log entries to delete - - :type logger_name: str - :param logger_name: name of logger containing the log entries to delete - """ - path = "/projects/%s/logs/%s" % (project, logger_name) - self.api_request(method="DELETE", path=path) - - -class _SinksAPI(object): - """Helper mapping sink-related APIs. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client used to make API requests. - """ - - def __init__(self, client): - self._client = client - self.api_request = client._connection.api_request - - def list_sinks(self, project, page_size=None, page_token=None): - """List sinks for the project associated with this client. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/list - - :type project: str - :param project: ID of the project whose sinks are to be listed. - - :type page_size: int - :param page_size: maximum number of sinks to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of sinks. If not - passed, the API will return the first page of - sinks. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.logging.sink.Sink` - accessible to the current API. - """ - extra_params = {} - - if page_size is not None: - extra_params["pageSize"] = page_size - - path = "/projects/%s/sinks" % (project,) - return page_iterator.HTTPIterator( - client=self._client, - api_request=self._client._connection.api_request, - path=path, - item_to_value=_item_to_sink, - items_key="sinks", - page_token=page_token, - extra_params=extra_params, - ) - - def sink_create( - self, project, sink_name, filter_, destination, unique_writer_identity=False - ): - """API call: create a sink resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create - - :type project: str - :param project: ID of the project in which to create the sink. - - :type sink_name: str - :param sink_name: the name of the sink - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the sink. - - :type destination: str - :param destination: destination URI for the entries exported by - the sink. - - :type unique_writer_identity: bool - :param unique_writer_identity: (Optional) determines the kind of - IAM identity returned as - writer_identity in the new sink. - - :rtype: dict - :returns: The returned (created) resource. 
- """ - target = "/projects/%s/sinks" % (project,) - data = {"name": sink_name, "filter": filter_, "destination": destination} - query_params = {"uniqueWriterIdentity": unique_writer_identity} - return self.api_request( - method="POST", path=target, data=data, query_params=query_params - ) - - def sink_get(self, project, sink_name): - """API call: retrieve a sink resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get - - :type project: str - :param project: ID of the project containing the sink. - - :type sink_name: str - :param sink_name: the name of the sink - - :rtype: dict - :returns: The JSON sink object returned from the API. - """ - target = "/projects/%s/sinks/%s" % (project, sink_name) - return self.api_request(method="GET", path=target) - - def sink_update( - self, project, sink_name, filter_, destination, unique_writer_identity=False - ): - """API call: update a sink resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/update - - :type project: str - :param project: ID of the project containing the sink. - - :type sink_name: str - :param sink_name: the name of the sink - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the sink. - - :type destination: str - :param destination: destination URI for the entries exported by - the sink. - - :type unique_writer_identity: bool - :param unique_writer_identity: (Optional) determines the kind of - IAM identity returned as - writer_identity in the new sink. - - :rtype: dict - :returns: The returned (updated) resource. - """ - target = "/projects/%s/sinks/%s" % (project, sink_name) - data = {"name": sink_name, "filter": filter_, "destination": destination} - query_params = {"uniqueWriterIdentity": unique_writer_identity} - return self.api_request( - method="PUT", path=target, query_params=query_params, data=data - ) - - def sink_delete(self, project, sink_name): - """API call: delete a sink resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/delete - - :type project: str - :param project: ID of the project containing the sink. - - :type sink_name: str - :param sink_name: the name of the sink - """ - target = "/projects/%s/sinks/%s" % (project, sink_name) - self.api_request(method="DELETE", path=target) - - -class _MetricsAPI(object): - """Helper mapping sink-related APIs. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client used to make API requests. - """ - - def __init__(self, client): - self._client = client - self.api_request = client._connection.api_request - - def list_metrics(self, project, page_size=None, page_token=None): - """List metrics for the project associated with this client. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list - - :type project: str - :param project: ID of the project whose metrics are to be listed. - - :type page_size: int - :param page_size: maximum number of metrics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of metrics. If not - passed, the API will return the first page of - metrics. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.logging.metric.Metric` - accessible to the current API. 
- """ - extra_params = {} - - if page_size is not None: - extra_params["pageSize"] = page_size - - path = "/projects/%s/metrics" % (project,) - return page_iterator.HTTPIterator( - client=self._client, - api_request=self._client._connection.api_request, - path=path, - item_to_value=_item_to_metric, - items_key="metrics", - page_token=page_token, - extra_params=extra_params, - ) - - def metric_create(self, project, metric_name, filter_, description=None): - """API call: create a metric resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create - - :type project: str - :param project: ID of the project in which to create the metric. - - :type metric_name: str - :param metric_name: the name of the metric - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the metric. - - :type description: str - :param description: description of the metric. - """ - target = "/projects/%s/metrics" % (project,) - data = {"name": metric_name, "filter": filter_, "description": description} - self.api_request(method="POST", path=target, data=data) - - def metric_get(self, project, metric_name): - """API call: retrieve a metric resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get - - :type project: str - :param project: ID of the project containing the metric. - - :type metric_name: str - :param metric_name: the name of the metric - - :rtype: dict - :returns: The JSON metric object returned from the API. - """ - target = "/projects/%s/metrics/%s" % (project, metric_name) - return self.api_request(method="GET", path=target) - - def metric_update(self, project, metric_name, filter_, description): - """API call: update a metric resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update - - :type project: str - :param project: ID of the project containing the metric. - - :type metric_name: str - :param metric_name: the name of the metric - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the metric. - - :type description: str - :param description: description of the metric. - - :rtype: dict - :returns: The returned (updated) resource. - """ - target = "/projects/%s/metrics/%s" % (project, metric_name) - data = {"name": metric_name, "filter": filter_, "description": description} - return self.api_request(method="PUT", path=target, data=data) - - def metric_delete(self, project, metric_name): - """API call: delete a metric resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/delete - - :type project: str - :param project: ID of the project containing the metric. - - :type metric_name: str - :param metric_name: the name of the metric. - """ - target = "/projects/%s/metrics/%s" % (project, metric_name) - self.api_request(method="DELETE", path=target) - - -def _item_to_entry(iterator, resource, loggers): - """Convert a log entry resource to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.api_core.page_iterator.Iterator`. It is intended to be - patched with a mutable ``loggers`` argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_LoggingAPI.list_entries`. 
- - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: Log entry JSON resource returned from the API. - - :type loggers: dict - :param loggers: - A mapping of logger fullnames -> loggers. If the logger - that owns the entry is not in ``loggers``, the entry - will have a newly-created logger. - - :rtype: :class:`~google.cloud.logging.entries._BaseEntry` - :returns: The next log entry in the page. - """ - return entry_from_resource(resource, iterator.client, loggers) - - -def _item_to_sink(iterator, resource): - """Convert a sink resource to the native object. - - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: Sink JSON resource returned from the API. - - :rtype: :class:`~google.cloud.logging.sink.Sink` - :returns: The next sink in the page. - """ - return Sink.from_api_repr(resource, iterator.client) - - -def _item_to_metric(iterator, resource): - """Convert a metric resource to the native object. - - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: Metric JSON resource returned from the API. - - :rtype: :class:`~google.cloud.logging.metric.Metric` - :returns: The next metric in the page. - """ - return Metric.from_api_repr(resource, iterator.client) diff --git a/logging/google/cloud/logging/client.py b/logging/google/cloud/logging/client.py deleted file mode 100644 index 680c29c8a9dd..000000000000 --- a/logging/google/cloud/logging/client.py +++ /dev/null @@ -1,400 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Client for interacting with the Google Stackdriver Logging API.""" - -import logging -import os - -try: - from google.cloud.logging import _gapic -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False - _gapic = None -else: - _HAVE_GRPC = True - -import google.api_core.client_options -from google.cloud.client import ClientWithProject -from google.cloud.environment_vars import DISABLE_GRPC -from google.cloud.logging._helpers import retrieve_metadata_server -from google.cloud.logging._http import Connection -from google.cloud.logging._http import _LoggingAPI as JSONLoggingAPI -from google.cloud.logging._http import _MetricsAPI as JSONMetricsAPI -from google.cloud.logging._http import _SinksAPI as JSONSinksAPI -from google.cloud.logging.handlers import CloudLoggingHandler -from google.cloud.logging.handlers import AppEngineHandler -from google.cloud.logging.handlers import ContainerEngineHandler -from google.cloud.logging.handlers import setup_logging -from google.cloud.logging.handlers.handlers import EXCLUDED_LOGGER_DEFAULTS - -from google.cloud.logging.logger import Logger -from google.cloud.logging.metric import Metric -from google.cloud.logging.sink import Sink - - -_DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) -_USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC - -_APPENGINE_FLEXIBLE_ENV_VM = "GAE_APPENGINE_HOSTNAME" -"""Environment variable set in App Engine when vm:true is set.""" - -_APPENGINE_INSTANCE_ID = "GAE_INSTANCE" -"""Environment variable set in App Engine standard and flexible environment.""" - -_GKE_CLUSTER_NAME = "instance/attributes/cluster-name" -"""Attribute in metadata server when in GKE environment.""" - - -class Client(ClientWithProject): - """Client to bundle configuration needed for API requests. - - :type project: str - :param project: the project which the client acts on behalf of. - If not passed, falls back to the default inferred - from the environment. - - :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: (Optional) The OAuth2 Credentials to use for this - client. If not passed (and if no ``_http`` object is - passed), falls back to the default inferred from the - environment. - - :type _http: :class:`~requests.Session` - :param _http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`requests.Session.request`. If not passed, an - ``_http`` object is created that is bound to the - ``credentials`` for the current object. - This parameter should be considered private, and could - change in the future. - - :type _use_grpc: bool - :param _use_grpc: (Optional) Explicitly specifies whether - to use the gRPC transport or HTTP. If unset, - falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` - environment variable - This parameter should be considered private, and could - change in the future. - - :type client_info: - :class:`google.api_core.client_info.ClientInfo` or - :class:`google.api_core.gapic_v1.client_info.ClientInfo` - :param client_info: - The client info used to send a user-agent string along with API - requests. If ``None``, then default info will be used. Generally, - you only need to set this if you're developing your own library - or partner tool. - :type client_options: :class:`~google.api_core.client_options.ClientOptions` - or :class:`dict` - :param client_options: (Optional) Client options used to set user options - on the client. API Endpoint should be set through client_options. 
- """ - - _logging_api = None - _sinks_api = None - _metrics_api = None - - SCOPE = ( - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/cloud-platform", - ) - """The scopes required for authenticating as a Logging consumer.""" - - def __init__( - self, - project=None, - credentials=None, - _http=None, - _use_grpc=None, - client_info=None, - client_options=None, - ): - super(Client, self).__init__( - project=project, credentials=credentials, _http=_http - ) - - kw_args = {"client_info": client_info} - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - kw_args["api_endpoint"] = api_endpoint - - self._connection = Connection(self, **kw_args) - - self._client_info = client_info - if _use_grpc is None: - self._use_grpc = _USE_GRPC - else: - self._use_grpc = _use_grpc - - @property - def logging_api(self): - """Helper for logging-related API calls. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs - """ - if self._logging_api is None: - if self._use_grpc: - self._logging_api = _gapic.make_logging_api(self) - else: - self._logging_api = JSONLoggingAPI(self) - return self._logging_api - - @property - def sinks_api(self): - """Helper for log sink-related API calls. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks - """ - if self._sinks_api is None: - if self._use_grpc: - self._sinks_api = _gapic.make_sinks_api(self) - else: - self._sinks_api = JSONSinksAPI(self) - return self._sinks_api - - @property - def metrics_api(self): - """Helper for log metric-related API calls. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics - """ - if self._metrics_api is None: - if self._use_grpc: - self._metrics_api = _gapic.make_metrics_api(self) - else: - self._metrics_api = JSONMetricsAPI(self) - return self._metrics_api - - def logger(self, name): - """Creates a logger bound to the current client. - - :type name: str - :param name: the name of the logger to be constructed. - - :rtype: :class:`google.cloud.logging.logger.Logger` - :returns: Logger created with the current client. - """ - return Logger(name, client=self) - - def list_entries( - self, - projects=None, - filter_=None, - order_by=None, - page_size=None, - page_token=None, - ): - """Return a page of log entries. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list - - :type projects: list of strings - :param projects: project IDs to include. If not passed, - defaults to the project bound to the client. - - :type filter_: str - :param filter_: - a filter expression. See - https://cloud.google.com/logging/docs/view/advanced_filters - - :type order_by: str - :param order_by: One of :data:`~google.cloud.logging.ASCENDING` - or :data:`~google.cloud.logging.DESCENDING`. - - :type page_size: int - :param page_size: - Optional. The maximum number of entries in each page of results - from this request. Non-positive values are ignored. Defaults - to a sensible value set by the API. - - :type page_token: str - :param page_token: - Optional. 
If present, return the next batch of entries, using - the value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property of the returned iterator instead of manually passing - the token. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` - accessible to the current client. - """ - if projects is None: - projects = [self.project] - - return self.logging_api.list_entries( - projects=projects, - filter_=filter_, - order_by=order_by, - page_size=page_size, - page_token=page_token, - ) - - def sink(self, name, filter_=None, destination=None): - """Creates a sink bound to the current client. - - :type name: str - :param name: the name of the sink to be constructed. - - :type filter_: str - :param filter_: (optional) the advanced logs filter expression - defining the entries exported by the sink. If not - passed, the instance should already exist, to be - refreshed via :meth:`Sink.reload`. - - :type destination: str - :param destination: destination URI for the entries exported by - the sink. If not passed, the instance should - already exist, to be refreshed via - :meth:`Sink.reload`. - - :rtype: :class:`google.cloud.logging.sink.Sink` - :returns: Sink created with the current client. - """ - return Sink(name, filter_, destination, client=self) - - def list_sinks(self, page_size=None, page_token=None): - """List sinks for the project associated with this client. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/list - - :type page_size: int - :param page_size: - Optional. The maximum number of sinks in each page of results from - this request. Non-positive values are ignored. Defaults to a - sensible value set by the API. - - :type page_token: str - :param page_token: - Optional. If present, return the next batch of sinks, using the - value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property of the returned iterator instead of manually passing the - token. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.logging.sink.Sink` - accessible to the current client. - """ - return self.sinks_api.list_sinks(self.project, page_size, page_token) - - def metric(self, name, filter_=None, description=""): - """Creates a metric bound to the current client. - - :type name: str - :param name: the name of the metric to be constructed. - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries tracked by the metric. If not - passed, the instance should already exist, to be - refreshed via :meth:`Metric.reload`. - - :type description: str - :param description: the description of the metric to be constructed. - If not passed, the instance should already exist, - to be refreshed via :meth:`Metric.reload`. - - :rtype: :class:`google.cloud.logging.metric.Metric` - :returns: Metric created with the current client. - """ - return Metric(name, filter_, client=self, description=description) - - def list_metrics(self, page_size=None, page_token=None): - """List metrics for the project associated with this client. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list - - :type page_size: int - :param page_size: - Optional. The maximum number of metrics in each page of results - from this request. 
Non-positive values are ignored. Defaults to a - sensible value set by the API. - - :type page_token: str - :param page_token: - Optional. If present, return the next batch of metrics, using the - value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property of the returned iterator instead of manually passing the - token. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.logging.metric.Metric` - accessible to the current client. - """ - return self.metrics_api.list_metrics(self.project, page_size, page_token) - - def get_default_handler(self, **kw): - """Return the default logging handler based on the local environment. - - :type kw: dict - :param kw: keyword args passed to handler constructor - - :rtype: :class:`logging.Handler` - :returns: The default log handler based on the environment - """ - gke_cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME) - - if ( - _APPENGINE_FLEXIBLE_ENV_VM in os.environ - or _APPENGINE_INSTANCE_ID in os.environ - ): - return AppEngineHandler(self, **kw) - elif gke_cluster_name is not None: - return ContainerEngineHandler(**kw) - else: - return CloudLoggingHandler(self, **kw) - - def setup_logging( - self, log_level=logging.INFO, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, **kw - ): - """Attach default Stackdriver logging handler to the root logger. - - This method uses the default log handler, obtained by - :meth:`~get_default_handler`, and attaches it to the root Python - logger, so that a call such as ``logging.warn``, as well as all child - loggers, will report to Stackdriver logging. - - :type log_level: int - :param log_level: (Optional) Python logging log level. Defaults to - :const:`logging.INFO`. - - :type excluded_loggers: tuple - :param excluded_loggers: (Optional) The loggers to not attach the - handler to. This will always include the - loggers in the path of the logging client - itself. - - :type kw: dict - :param kw: keyword args passed to handler constructor - """ - handler = self.get_default_handler(**kw) - setup_logging(handler, log_level=log_level, excluded_loggers=excluded_loggers) diff --git a/logging/google/cloud/logging/entries.py b/logging/google/cloud/logging/entries.py deleted file mode 100644 index ed1c28163f60..000000000000 --- a/logging/google/cloud/logging/entries.py +++ /dev/null @@ -1,371 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
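For orientation, a short sketch of the hand-written ``Client`` surface deleted above. The project ID, filter, and destination are placeholder values, application default credentials are assumed, and ``Sink.create()`` is the helper exercised by the sink snippets earlier in the docs.

.. code-block:: python

    from google.cloud import logging

    client = logging.Client(project="my-project")

    # Page through recent entries, newest first.
    for entry in client.list_entries(order_by=logging.DESCENDING, page_size=5):
        print(entry.timestamp, entry.log_name, entry.payload)

    # Define a sink exporting ERROR-and-above entries to a Cloud Storage bucket.
    sink = client.sink(
        "error-sink",
        filter_="severity>=ERROR",
        destination="storage.googleapis.com/my-export-bucket",
    )
    sink.create()  # assumed Sink helper; not shown in the code above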
- -"""Log entries within the Google Stackdriver Logging API.""" - -import collections -import json -import re - -from google.protobuf.any_pb2 import Any -from google.protobuf.json_format import MessageToDict -from google.protobuf.json_format import Parse - -from google.cloud.logging.resource import Resource -from google.cloud._helpers import _name_from_project_path -from google.cloud._helpers import _rfc3339_nanos_to_datetime -from google.cloud._helpers import _datetime_to_rfc3339 - - -_GLOBAL_RESOURCE = Resource(type="global", labels={}) - - -_LOGGER_TEMPLATE = re.compile( - r""" - projects/ # static prefix - (?P[^/]+) # initial letter, wordchars + hyphen - /logs/ # static midfix - (?P[^/]+) # initial letter, wordchars + allowed punc -""", - re.VERBOSE, -) - - -def logger_name_from_path(path): - """Validate a logger URI path and get the logger name. - - :type path: str - :param path: URI path for a logger API request. - - :rtype: str - :returns: Logger name parsed from ``path``. - :raises: :class:`ValueError` if the ``path`` is ill-formed or if - the project from the ``path`` does not agree with the - ``project`` passed in. - """ - return _name_from_project_path(path, None, _LOGGER_TEMPLATE) - - -def _int_or_none(value): - """Helper: return an integer or ``None``.""" - if value is not None: - value = int(value) - return value - - -_LOG_ENTRY_FIELDS = ( # (name, default) - ("log_name", None), - ("labels", None), - ("insert_id", None), - ("severity", None), - ("http_request", None), - ("timestamp", None), - ("resource", _GLOBAL_RESOURCE), - ("trace", None), - ("span_id", None), - ("trace_sampled", None), - ("source_location", None), - ("operation", None), - ("logger", None), - ("payload", None), -) - - -_LogEntryTuple = collections.namedtuple( - "LogEntry", (field for field, _ in _LOG_ENTRY_FIELDS) -) - -_LogEntryTuple.__new__.__defaults__ = tuple(default for _, default in _LOG_ENTRY_FIELDS) - - -_LOG_ENTRY_PARAM_DOCSTRING = """\ - - :type log_name: str - :param log_name: the name of the logger used to post the entry. - - :type labels: dict - :param labels: (optional) mapping of labels for the entry - - :type insert_id: text - :param insert_id: (optional) the ID used to identify an entry uniquely. - - :type severity: str - :param severity: (optional) severity of event being logged. - - :type http_request: dict - :param http_request: (optional) info about HTTP request associated with - the entry. - - :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp for the entry - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry - - :type trace: str - :param trace: (optional) traceid to apply to the entry. - - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. - - :type trace_sampled: bool - :param trace_sampled: (optional) the sampling decision of the trace - associated with the log entry. - - :type source_location: dict - :param source_location: (optional) location in source code from which - the entry was emitted. - - :type operation: dict - :param operation: (optional) additional information about a potentially - long-running operation associated with the log entry. - - :type logger: :class:`google.cloud.logging.logger.Logger` - :param logger: the logger used to write the entry. 
- -""" - -_LOG_ENTRY_SEE_ALSO_DOCSTRING = """\ - - See: - https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry -""" - - -class LogEntry(_LogEntryTuple): - __doc__ = ( - """ - Log entry. - - """ - + _LOG_ENTRY_PARAM_DOCSTRING - + _LOG_ENTRY_SEE_ALSO_DOCSTRING - ) - - received_timestamp = None - - @classmethod - def _extract_payload(cls, resource): - """Helper for :meth:`from_api_repr`""" - return None - - @classmethod - def from_api_repr(cls, resource, client, loggers=None): - """Factory: construct an entry given its API representation - - :type resource: dict - :param resource: text entry resource representation returned from - the API - - :type client: :class:`google.cloud.logging.client.Client` - :param client: Client which holds credentials and project - configuration. - - :type loggers: dict - :param loggers: - (Optional) A mapping of logger fullnames -> loggers. If not - passed, the entry will have a newly-created logger. - - :rtype: :class:`google.cloud.logging.entries.LogEntry` - :returns: Log entry parsed from ``resource``. - """ - if loggers is None: - loggers = {} - logger_fullname = resource["logName"] - logger = loggers.get(logger_fullname) - if logger is None: - logger_name = logger_name_from_path(logger_fullname) - logger = loggers[logger_fullname] = client.logger(logger_name) - payload = cls._extract_payload(resource) - insert_id = resource.get("insertId") - timestamp = resource.get("timestamp") - if timestamp is not None: - timestamp = _rfc3339_nanos_to_datetime(timestamp) - labels = resource.get("labels") - severity = resource.get("severity") - http_request = resource.get("httpRequest") - trace = resource.get("trace") - span_id = resource.get("spanId") - trace_sampled = resource.get("traceSampled") - source_location = resource.get("sourceLocation") - if source_location is not None: - line = source_location.pop("line", None) - source_location["line"] = _int_or_none(line) - operation = resource.get("operation") - - monitored_resource_dict = resource.get("resource") - monitored_resource = None - if monitored_resource_dict is not None: - monitored_resource = Resource._from_dict(monitored_resource_dict) - - inst = cls( - log_name=logger_fullname, - insert_id=insert_id, - timestamp=timestamp, - labels=labels, - severity=severity, - http_request=http_request, - resource=monitored_resource, - trace=trace, - span_id=span_id, - trace_sampled=trace_sampled, - source_location=source_location, - operation=operation, - logger=logger, - payload=payload, - ) - received = resource.get("receiveTimestamp") - if received is not None: - inst.received_timestamp = _rfc3339_nanos_to_datetime(received) - return inst - - def to_api_repr(self): - """API repr (JSON format) for entry. 
- """ - info = {} - if self.log_name is not None: - info["logName"] = self.log_name - if self.resource is not None: - info["resource"] = self.resource._to_dict() - if self.labels is not None: - info["labels"] = self.labels - if self.insert_id is not None: - info["insertId"] = self.insert_id - if self.severity is not None: - info["severity"] = self.severity - if self.http_request is not None: - info["httpRequest"] = self.http_request - if self.timestamp is not None: - info["timestamp"] = _datetime_to_rfc3339(self.timestamp) - if self.trace is not None: - info["trace"] = self.trace - if self.span_id is not None: - info["spanId"] = self.span_id - if self.trace_sampled is not None: - info["traceSampled"] = self.trace_sampled - if self.source_location is not None: - source_location = self.source_location.copy() - source_location["line"] = str(source_location.pop("line", 0)) - info["sourceLocation"] = source_location - if self.operation is not None: - info["operation"] = self.operation - return info - - -class TextEntry(LogEntry): - __doc__ = ( - """ - Log entry with text payload. - - """ - + _LOG_ENTRY_PARAM_DOCSTRING - + """ - - :type payload: str | unicode - :param payload: payload for the log entry. - """ - + _LOG_ENTRY_SEE_ALSO_DOCSTRING - ) - - @classmethod - def _extract_payload(cls, resource): - """Helper for :meth:`from_api_repr`""" - return resource["textPayload"] - - def to_api_repr(self): - """API repr (JSON format) for entry. - """ - info = super(TextEntry, self).to_api_repr() - info["textPayload"] = self.payload - return info - - -class StructEntry(LogEntry): - __doc__ = ( - """ - Log entry with JSON payload. - - """ - + _LOG_ENTRY_PARAM_DOCSTRING - + """ - - :type payload: dict - :param payload: payload for the log entry. - """ - + _LOG_ENTRY_SEE_ALSO_DOCSTRING - ) - - @classmethod - def _extract_payload(cls, resource): - """Helper for :meth:`from_api_repr`""" - return resource["jsonPayload"] - - def to_api_repr(self): - """API repr (JSON format) for entry. - """ - info = super(StructEntry, self).to_api_repr() - info["jsonPayload"] = self.payload - return info - - -class ProtobufEntry(LogEntry): - __doc__ = ( - """ - Log entry with protobuf message payload. - - """ - + _LOG_ENTRY_PARAM_DOCSTRING - + """ - - :type payload: protobuf message - :param payload: payload for the log entry. - """ - + _LOG_ENTRY_SEE_ALSO_DOCSTRING - ) - - @classmethod - def _extract_payload(cls, resource): - """Helper for :meth:`from_api_repr`""" - return resource["protoPayload"] - - @property - def payload_pb(self): - if isinstance(self.payload, Any): - return self.payload - - @property - def payload_json(self): - if not isinstance(self.payload, Any): - return self.payload - - def to_api_repr(self): - """API repr (JSON format) for entry. - """ - info = super(ProtobufEntry, self).to_api_repr() - info["protoPayload"] = MessageToDict(self.payload) - return info - - def parse_message(self, message): - """Parse payload into a protobuf message. - - Mutates the passed-in ``message`` in place. - - :type message: Protobuf message - :param message: the message to be logged - """ - # NOTE: This assumes that ``payload`` is already a deserialized - # ``Any`` field and ``message`` has come from an imported - # ``pb2`` module with the relevant protobuf message type. 
- Parse(json.dumps(self.payload), message) diff --git a/logging/google/cloud/logging/handlers/__init__.py b/logging/google/cloud/logging/handlers/__init__.py deleted file mode 100644 index 67b96c95e907..000000000000 --- a/logging/google/cloud/logging/handlers/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Python :mod:`logging` handlers for Google Cloud Logging.""" - -from google.cloud.logging.handlers.app_engine import AppEngineHandler -from google.cloud.logging.handlers.container_engine import ContainerEngineHandler -from google.cloud.logging.handlers.handlers import CloudLoggingHandler -from google.cloud.logging.handlers.handlers import setup_logging - -__all__ = [ - "AppEngineHandler", - "CloudLoggingHandler", - "ContainerEngineHandler", - "setup_logging", -] diff --git a/logging/google/cloud/logging/handlers/_helpers.py b/logging/google/cloud/logging/handlers/_helpers.py deleted file mode 100644 index d65a2690f8f7..000000000000 --- a/logging/google/cloud/logging/handlers/_helpers.py +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright 2016 Google LLC All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Helper functions for logging handlers.""" - -import math -import json - -try: - import flask -except ImportError: # pragma: NO COVER - flask = None - -try: - import webapp2 -except (ImportError, SyntaxError): # pragma: NO COVER - # If you try to import webapp2 under python3, you'll get a syntax - # error (since it hasn't been ported yet). We just pretend it - # doesn't exist. This is unlikely to hit in real life but does - # in the tests. - webapp2 = None - -from google.cloud.logging.handlers.middleware.request import _get_django_request - -_DJANGO_TRACE_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT" -_FLASK_TRACE_HEADER = "X_CLOUD_TRACE_CONTEXT" -_WEBAPP2_TRACE_HEADER = "X-CLOUD-TRACE-CONTEXT" - - -def format_stackdriver_json(record, message): - """Helper to format a LogRecord in in Stackdriver fluentd format. - - :rtype: str - :returns: JSON str to be written to the log file. - """ - subsecond, second = math.modf(record.created) - - payload = { - "message": message, - "timestamp": {"seconds": int(second), "nanos": int(subsecond * 1e9)}, - "thread": record.thread, - "severity": record.levelname, - } - - return json.dumps(payload) - - -def get_trace_id_from_flask(): - """Get trace_id from flask request headers. - - :rtype: str - :returns: TraceID in HTTP request headers. 
- """ - if flask is None or not flask.request: - return None - - header = flask.request.headers.get(_FLASK_TRACE_HEADER) - - if header is None: - return None - - trace_id = header.split("/", 1)[0] - - return trace_id - - -def get_trace_id_from_webapp2(): - """Get trace_id from webapp2 request headers. - - :rtype: str - :returns: TraceID in HTTP request headers. - """ - if webapp2 is None: - return None - - try: - # get_request() succeeds if we're in the middle of a webapp2 - # request, or raises an assertion error otherwise: - # "Request global variable is not set". - req = webapp2.get_request() - except AssertionError: - return None - - header = req.headers.get(_WEBAPP2_TRACE_HEADER) - - if header is None: - return None - - trace_id = header.split("/", 1)[0] - - return trace_id - - -def get_trace_id_from_django(): - """Get trace_id from django request headers. - - :rtype: str - :returns: TraceID in HTTP request headers. - """ - request = _get_django_request() - - if request is None: - return None - - header = request.META.get(_DJANGO_TRACE_HEADER) - if header is None: - return None - - trace_id = header.split("/", 1)[0] - - return trace_id - - -def get_trace_id(): - """Helper to get trace_id from web application request header. - - :rtype: str - :returns: TraceID in HTTP request headers. - """ - checkers = ( - get_trace_id_from_django, - get_trace_id_from_flask, - get_trace_id_from_webapp2, - ) - - for checker in checkers: - trace_id = checker() - if trace_id is not None: - return trace_id - - return None diff --git a/logging/google/cloud/logging/handlers/app_engine.py b/logging/google/cloud/logging/handlers/app_engine.py deleted file mode 100644 index d0179fb6dcfc..000000000000 --- a/logging/google/cloud/logging/handlers/app_engine.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright 2016 Google LLC All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Logging handler for App Engine Flexible - -Sends logs to the Stackdriver Logging API with the appropriate resource -and labels for App Engine logs. -""" - -import logging -import os - -from google.cloud.logging.handlers._helpers import get_trace_id -from google.cloud.logging.handlers.transports import BackgroundThreadTransport -from google.cloud.logging.resource import Resource - -_DEFAULT_GAE_LOGGER_NAME = "app" - -_GAE_PROJECT_ENV_FLEX = "GCLOUD_PROJECT" -_GAE_PROJECT_ENV_STANDARD = "GOOGLE_CLOUD_PROJECT" -_GAE_SERVICE_ENV = "GAE_SERVICE" -_GAE_VERSION_ENV = "GAE_VERSION" - -_TRACE_ID_LABEL = "appengine.googleapis.com/trace_id" - - -class AppEngineHandler(logging.StreamHandler): - """A logging handler that sends App Engine-formatted logs to Stackdriver. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The authenticated Google Cloud Logging client for this - handler to use. - - :type transport: :class:`type` - :param transport: The transport class. It should be a subclass - of :class:`.Transport`. If unspecified, - :class:`.BackgroundThreadTransport` will be used. 
- - :type stream: file-like object - :param stream: (optional) stream to be used by the handler. - """ - - def __init__( - self, - client, - name=_DEFAULT_GAE_LOGGER_NAME, - transport=BackgroundThreadTransport, - stream=None, - ): - super(AppEngineHandler, self).__init__(stream) - self.name = name - self.client = client - self.transport = transport(client, name) - self.project_id = os.environ.get( - _GAE_PROJECT_ENV_FLEX, os.environ.get(_GAE_PROJECT_ENV_STANDARD, "") - ) - self.module_id = os.environ.get(_GAE_SERVICE_ENV, "") - self.version_id = os.environ.get(_GAE_VERSION_ENV, "") - self.resource = self.get_gae_resource() - - def get_gae_resource(self): - """Return the GAE resource using the environment variables. - - :rtype: :class:`~google.cloud.logging.resource.Resource` - :returns: Monitored resource for GAE. - """ - gae_resource = Resource( - type="gae_app", - labels={ - "project_id": self.project_id, - "module_id": self.module_id, - "version_id": self.version_id, - }, - ) - return gae_resource - - def get_gae_labels(self): - """Return the labels for GAE app. - - If the trace ID can be detected, it will be included as a label. - Currently, no other labels are included. - - :rtype: dict - :returns: Labels for GAE app. - """ - gae_labels = {} - - trace_id = get_trace_id() - if trace_id is not None: - gae_labels[_TRACE_ID_LABEL] = trace_id - - return gae_labels - - def emit(self, record): - """Actually log the specified logging record. - - Overrides the default emit behavior of ``StreamHandler``. - - See https://docs.python.org/2/library/logging.html#handler-objects - - :type record: :class:`logging.LogRecord` - :param record: The record to be logged. - """ - message = super(AppEngineHandler, self).format(record) - gae_labels = self.get_gae_labels() - trace_id = ( - "projects/%s/traces/%s" % (self.project_id, gae_labels[_TRACE_ID_LABEL]) - if _TRACE_ID_LABEL in gae_labels - else None - ) - self.transport.send( - record, message, resource=self.resource, labels=gae_labels, trace=trace_id - ) diff --git a/logging/google/cloud/logging/handlers/container_engine.py b/logging/google/cloud/logging/handlers/container_engine.py deleted file mode 100644 index 9fe460889232..000000000000 --- a/logging/google/cloud/logging/handlers/container_engine.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2016 Google LLC All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Logging handler for Google Container Engine (GKE). - -Formats log messages in a JSON format, so that Kubernetes clusters with the -fluentd Google Cloud plugin installed can format their log messages so that -metadata such as log level is properly captured. -""" - -import logging.handlers - -from google.cloud.logging.handlers._helpers import format_stackdriver_json - - -class ContainerEngineHandler(logging.StreamHandler): - """Handler to format log messages the format expected by GKE fluent. 
- - This handler is written to format messages for the Google Container Engine - (GKE) fluentd plugin, so that metadata such as log level are properly set. - - :type name: str - :param name: (optional) the name of the custom log in Stackdriver Logging. - - :type stream: file-like object - :param stream: (optional) stream to be used by the handler. - """ - - def __init__(self, name=None, stream=None): - super(ContainerEngineHandler, self).__init__(stream=stream) - self.name = name - - def format(self, record): - """Format the message into JSON expected by fluentd. - - :type record: :class:`~logging.LogRecord` - :param record: the log record - - :rtype: str - :returns: A JSON string formatted for GKE fluentd. - """ - message = super(ContainerEngineHandler, self).format(record) - return format_stackdriver_json(record, message) diff --git a/logging/google/cloud/logging/handlers/handlers.py b/logging/google/cloud/logging/handlers/handlers.py deleted file mode 100644 index 111cec8d27cf..000000000000 --- a/logging/google/cloud/logging/handlers/handlers.py +++ /dev/null @@ -1,156 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Python :mod:`logging` handlers for Stackdriver Logging.""" - -import logging - -from google.cloud.logging.handlers.transports import BackgroundThreadTransport -from google.cloud.logging.logger import _GLOBAL_RESOURCE - -DEFAULT_LOGGER_NAME = "python" - -EXCLUDED_LOGGER_DEFAULTS = ("google.cloud", "google.auth", "google_auth_httplib2") - - -class CloudLoggingHandler(logging.StreamHandler): - """Handler that directly makes Stackdriver logging API calls. - - This is a Python standard ``logging`` handler using that can be used to - route Python standard logging messages directly to the Stackdriver - Logging API. - - This handler is used when not in GAE or GKE environment. - - This handler supports both an asynchronous and synchronous transport. - - :type client: :class:`google.cloud.logging.client.Client` - :param client: the authenticated Google Cloud Logging client for this - handler to use - - :type name: str - :param name: the name of the custom log in Stackdriver Logging. Defaults - to 'python'. The name of the Python logger will be represented - in the ``python_logger`` field. - - :type transport: :class:`type` - :param transport: Class for creating new transport objects. It should - extend from the base :class:`.Transport` type and - implement :meth`.Transport.send`. Defaults to - :class:`.BackgroundThreadTransport`. The other - option is :class:`.SyncTransport`. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry, defaults - to the global resource type. - - :type labels: dict - :param labels: (Optional) Mapping of labels for the entry. - - :type stream: file-like object - :param stream: (optional) stream to be used by the handler. - - Example: - - .. 
code-block:: python - - import logging - import google.cloud.logging - from google.cloud.logging.handlers import CloudLoggingHandler - - client = google.cloud.logging.Client() - handler = CloudLoggingHandler(client) - - cloud_logger = logging.getLogger('cloudLogger') - cloud_logger.setLevel(logging.INFO) - cloud_logger.addHandler(handler) - - cloud_logger.error('bad news') # API call - """ - - def __init__( - self, - client, - name=DEFAULT_LOGGER_NAME, - transport=BackgroundThreadTransport, - resource=_GLOBAL_RESOURCE, - labels=None, - stream=None, - ): - super(CloudLoggingHandler, self).__init__(stream) - self.name = name - self.client = client - self.transport = transport(client, name) - self.resource = resource - self.labels = labels - - def emit(self, record): - """Actually log the specified logging record. - - Overrides the default emit behavior of ``StreamHandler``. - - See https://docs.python.org/2/library/logging.html#handler-objects - - :type record: :class:`logging.LogRecord` - :param record: The record to be logged. - """ - message = super(CloudLoggingHandler, self).format(record) - self.transport.send(record, message, resource=self.resource, labels=self.labels) - - -def setup_logging( - handler, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, log_level=logging.INFO -): - """Attach a logging handler to the Python root logger - - Excludes loggers that this library itself uses to avoid - infinite recursion. - - :type handler: :class:`logging.handler` - :param handler: the handler to attach to the global handler - - :type excluded_loggers: tuple - :param excluded_loggers: (Optional) The loggers to not attach the handler - to. This will always include the loggers in the - path of the logging client itself. - - :type log_level: int - :param log_level: (Optional) Python logging log level. Defaults to - :const:`logging.INFO`. - - Example: - - .. code-block:: python - - import logging - import google.cloud.logging - from google.cloud.logging.handlers import CloudLoggingHandler - - client = google.cloud.logging.Client() - handler = CloudLoggingHandler(client) - google.cloud.logging.handlers.setup_logging(handler) - logging.getLogger().setLevel(logging.DEBUG) - - logging.error('bad news') # API call - - """ - all_excluded_loggers = set(excluded_loggers + EXCLUDED_LOGGER_DEFAULTS) - logger = logging.getLogger() - logger.setLevel(log_level) - logger.addHandler(handler) - logger.addHandler(logging.StreamHandler()) - for logger_name in all_excluded_loggers: - logger = logging.getLogger(logger_name) - logger.propagate = False - logger.addHandler(logging.StreamHandler()) diff --git a/logging/google/cloud/logging/handlers/middleware/__init__.py b/logging/google/cloud/logging/handlers/middleware/__init__.py deleted file mode 100644 index d8ba3016f724..000000000000 --- a/logging/google/cloud/logging/handlers/middleware/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2017 Google LLC All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
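The ``setup_logging`` helper shown above always folds ``EXCLUDED_LOGGER_DEFAULTS`` into the exclusion set and disables propagation for those loggers, so the library's own log records never recurse into the API. A short sketch of passing an extra exclusion, assuming the pre-split package layout; the ``"urllib3"`` and ``"my-app"`` names are illustrative only:

.. code-block:: python

    import logging

    import google.cloud.logging
    from google.cloud.logging.handlers import CloudLoggingHandler, setup_logging

    client = google.cloud.logging.Client()
    handler = CloudLoggingHandler(client, name="my-app")  # log name is illustrative

    # "urllib3" is excluded in addition to the defaults
    # ("google.cloud", "google.auth", "google_auth_httplib2"), which are always kept.
    setup_logging(handler, excluded_loggers=("urllib3",), log_level=logging.DEBUG)

    logging.getLogger("urllib3").info("stays on the local stream only")
    logging.getLogger(__name__).info("sent to Stackdriver")  # API call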
- -from google.cloud.logging.handlers.middleware.request import RequestMiddleware - -__all__ = ["RequestMiddleware"] diff --git a/logging/google/cloud/logging/handlers/middleware/request.py b/logging/google/cloud/logging/handlers/middleware/request.py deleted file mode 100644 index 33bc278fcf60..000000000000 --- a/logging/google/cloud/logging/handlers/middleware/request.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2017 Google LLC All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Django middleware helper to capture a request. - -The request is stored on a thread-local so that it can be -inspected by other helpers. -""" - -import threading - - -_thread_locals = threading.local() - - -def _get_django_request(): - """Get Django request from thread local. - - :rtype: str - :returns: Django request. - """ - return getattr(_thread_locals, "request", None) - - -try: - # Django >= 1.10 - from django.utils.deprecation import MiddlewareMixin -except ImportError: - # Not required for Django <= 1.9, see: - # https://docs.djangoproject.com/en/1.10/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware - MiddlewareMixin = object - - -class RequestMiddleware(MiddlewareMixin): - """Saves the request in thread local""" - - def __init__(self, get_response=None): - self.get_response = get_response - - def process_request(self, request): - """Called on each request, before Django decides which view to execute. - - :type request: :class:`~django.http.request.HttpRequest` - :param request: Django http request. - """ - _thread_locals.request = request diff --git a/logging/google/cloud/logging/handlers/transports/__init__.py b/logging/google/cloud/logging/handlers/transports/__init__.py deleted file mode 100644 index 3c6cc214e5e3..000000000000 --- a/logging/google/cloud/logging/handlers/transports/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Transport classes for Python logging integration. - -Currently two options are provided, a synchronous transport that makes -an API call for each log statement, and an asynchronous handler that -sends the API using a :class:`~google.cloud.logging.logger.Batch` object in -the background. 
-""" - -from google.cloud.logging.handlers.transports.base import Transport -from google.cloud.logging.handlers.transports.sync import SyncTransport -from google.cloud.logging.handlers.transports.background_thread import ( - BackgroundThreadTransport, -) - -__all__ = ["BackgroundThreadTransport", "SyncTransport", "Transport"] diff --git a/logging/google/cloud/logging/handlers/transports/background_thread.py b/logging/google/cloud/logging/handlers/transports/background_thread.py deleted file mode 100644 index 812b733cff92..000000000000 --- a/logging/google/cloud/logging/handlers/transports/background_thread.py +++ /dev/null @@ -1,353 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Transport for Python logging handler - -Uses a background worker to log to Stackdriver Logging asynchronously. -""" - -from __future__ import print_function - -import atexit -import datetime -import logging -import sys -import threading -import time - -from six.moves import queue - -from google.cloud.logging import _helpers -from google.cloud.logging.handlers.transports.base import Transport - -_DEFAULT_GRACE_PERIOD = 5.0 # Seconds -_DEFAULT_MAX_BATCH_SIZE = 10 -_DEFAULT_MAX_LATENCY = 0 # Seconds -_WORKER_THREAD_NAME = "google.cloud.logging.Worker" -_WORKER_TERMINATOR = object() -_LOGGER = logging.getLogger(__name__) - - -def _get_many(queue_, max_items=None, max_latency=0): - """Get multiple items from a Queue. - - Gets at least one (blocking) and at most ``max_items`` items - (non-blocking) from a given Queue. Does not mark the items as done. - - :type queue_: :class:`~queue.Queue` - :param queue_: The Queue to get items from. - - :type max_items: int - :param max_items: The maximum number of items to get. If ``None``, then all - available items in the queue are returned. - - :type max_latency: float - :param max_latency: The maximum number of seconds to wait for more than one - item from a queue. This number includes the time required to retrieve - the first item. - - :rtype: list - :returns: items retrieved from the queue. - """ - start = time.time() - # Always return at least one item. - items = [queue_.get()] - while max_items is None or len(items) < max_items: - try: - elapsed = time.time() - start - timeout = max(0, max_latency - elapsed) - items.append(queue_.get(timeout=timeout)) - except queue.Empty: - break - return items - - -class _Worker(object): - """A background thread that writes batches of log entries. - - :type cloud_logger: :class:`~google.cloud.logging.logger.Logger` - :param cloud_logger: The logger to send entries to. - - :type grace_period: float - :param grace_period: The amount of time to wait for pending logs to - be submitted when the process is shutting down. - - :type max_batch_size: int - :param max_batch_size: The maximum number of items to send at a time - in the background thread. - - :type max_latency: float - :param max_latency: The amount of time to wait for new logs before - sending a new batch. 
It is strongly recommended to keep this smaller - than the grace_period. This means this is effectively the longest - amount of time the background thread will hold onto log entries - before sending them to the server. - """ - - def __init__( - self, - cloud_logger, - grace_period=_DEFAULT_GRACE_PERIOD, - max_batch_size=_DEFAULT_MAX_BATCH_SIZE, - max_latency=_DEFAULT_MAX_LATENCY, - ): - self._cloud_logger = cloud_logger - self._grace_period = grace_period - self._max_batch_size = max_batch_size - self._max_latency = max_latency - self._queue = queue.Queue(0) - self._operational_lock = threading.Lock() - self._thread = None - - @property - def is_alive(self): - """Returns True is the background thread is running.""" - return self._thread is not None and self._thread.is_alive() - - def _safely_commit_batch(self, batch): - total_logs = len(batch.entries) - - try: - if total_logs > 0: - batch.commit() - _LOGGER.debug("Submitted %d logs", total_logs) - except Exception: - _LOGGER.error("Failed to submit %d logs.", total_logs, exc_info=True) - - def _thread_main(self): - """The entry point for the worker thread. - - Pulls pending log entries off the queue and writes them in batches to - the Cloud Logger. - """ - _LOGGER.debug("Background thread started.") - - done = False - while not done: - batch = self._cloud_logger.batch() - items = _get_many( - self._queue, - max_items=self._max_batch_size, - max_latency=self._max_latency, - ) - - for item in items: - if item is _WORKER_TERMINATOR: - done = True # Continue processing items. - else: - batch.log_struct(**item) - - self._safely_commit_batch(batch) - - for _ in items: - self._queue.task_done() - - _LOGGER.debug("Background thread exited gracefully.") - - def start(self): - """Starts the background thread. - - Additionally, this registers a handler for process exit to attempt - to send any pending log entries before shutdown. - """ - with self._operational_lock: - if self.is_alive: - return - - self._thread = threading.Thread( - target=self._thread_main, name=_WORKER_THREAD_NAME - ) - self._thread.daemon = True - self._thread.start() - atexit.register(self._main_thread_terminated) - - def stop(self, grace_period=None): - """Signals the background thread to stop. - - This does not terminate the background thread. It simply queues the - stop signal. If the main process exits before the background thread - processes the stop signal, it will be terminated without finishing - work. The ``grace_period`` parameter will give the background - thread some time to finish processing before this function returns. - - :type grace_period: float - :param grace_period: If specified, this method will block up to this - many seconds to allow the background thread to finish work before - returning. - - :rtype: bool - :returns: True if the thread terminated. False if the thread is still - running. - """ - if not self.is_alive: - return True - - with self._operational_lock: - self._queue.put_nowait(_WORKER_TERMINATOR) - - if grace_period is not None: - print("Waiting up to %d seconds." % (grace_period,), file=sys.stderr) - - self._thread.join(timeout=grace_period) - - # Check this before disowning the thread, because after we disown - # the thread is_alive will be False regardless of if the thread - # exited or not. 
- success = not self.is_alive - - self._thread = None - - return success - - def _main_thread_terminated(self): - """Callback that attempts to send pending logs before termination.""" - if not self.is_alive: - return - - if not self._queue.empty(): - print( - "Program shutting down, attempting to send %d queued log " - "entries to Stackdriver Logging..." % (self._queue.qsize(),), - file=sys.stderr, - ) - - if self.stop(self._grace_period): - print("Sent all pending logs.", file=sys.stderr) - else: - print( - "Failed to send %d pending logs." % (self._queue.qsize(),), - file=sys.stderr, - ) - - def enqueue( - self, record, message, resource=None, labels=None, trace=None, span_id=None - ): - """Queues a log entry to be written by the background thread. - - :type record: :class:`logging.LogRecord` - :param record: Python log record that the handler was called with. - - :type message: str - :param message: The message from the ``LogRecord`` after being - formatted by the associated log formatters. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry - - :type labels: dict - :param labels: (Optional) Mapping of labels for the entry. - - :type trace: str - :param trace: (optional) traceid to apply to the logging entry. - - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. - """ - queue_entry = { - "info": {"message": message, "python_logger": record.name}, - "severity": _helpers._normalize_severity(record.levelno), - "resource": resource, - "labels": labels, - "trace": trace, - "span_id": span_id, - "timestamp": datetime.datetime.utcfromtimestamp(record.created), - } - self._queue.put_nowait(queue_entry) - - def flush(self): - """Submit any pending log records.""" - self._queue.join() - - -class BackgroundThreadTransport(Transport): - """Asynchronous transport that uses a background thread. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The Logging client. - - :type name: str - :param name: the name of the logger. - - :type grace_period: float - :param grace_period: The amount of time to wait for pending logs to - be submitted when the process is shutting down. - - :type batch_size: int - :param batch_size: The maximum number of items to send at a time in the - background thread. - - :type max_latency: float - :param max_latency: The amount of time to wait for new logs before - sending a new batch. It is strongly recommended to keep this smaller - than the grace_period. This means this is effectively the longest - amount of time the background thread will hold onto log entries - before sending them to the server. - """ - - def __init__( - self, - client, - name, - grace_period=_DEFAULT_GRACE_PERIOD, - batch_size=_DEFAULT_MAX_BATCH_SIZE, - max_latency=_DEFAULT_MAX_LATENCY, - ): - self.client = client - logger = self.client.logger(name) - self.worker = _Worker( - logger, - grace_period=grace_period, - max_batch_size=batch_size, - max_latency=max_latency, - ) - self.worker.start() - - def send( - self, record, message, resource=None, labels=None, trace=None, span_id=None - ): - """Overrides Transport.send(). - - :type record: :class:`logging.LogRecord` - :param record: Python log record that the handler was called with. - - :type message: str - :param message: The message from the ``LogRecord`` after being - formatted by the associated log formatters. 
- - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry. - - :type labels: dict - :param labels: (Optional) Mapping of labels for the entry. - - :type trace: str - :param trace: (optional) traceid to apply to the logging entry. - - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. - """ - self.worker.enqueue( - record, - message, - resource=resource, - labels=labels, - trace=trace, - span_id=span_id, - ) - - def flush(self): - """Submit any pending log records.""" - self.worker.flush() diff --git a/logging/google/cloud/logging/handlers/transports/base.py b/logging/google/cloud/logging/handlers/transports/base.py deleted file mode 100644 index 7e24cc0206ca..000000000000 --- a/logging/google/cloud/logging/handlers/transports/base.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Module containing base class for logging transport.""" - - -class Transport(object): - """Base class for Google Cloud Logging handler transports. - - Subclasses of :class:`Transport` must have constructors that accept a - client and name object, and must override :meth:`send`. - """ - - def send( - self, record, message, resource=None, labels=None, trace=None, span_id=None - ): - """Transport send to be implemented by subclasses. - - :type record: :class:`logging.LogRecord` - :param record: Python log record that the handler was called with. - - :type message: str - :param message: The message from the ``LogRecord`` after being - formatted by the associated log formatters. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry. - - :type labels: dict - :param labels: (Optional) Mapping of labels for the entry. - """ - raise NotImplementedError - - def flush(self): - """Submit any pending log records. - - For blocking/sync transports, this is a no-op. - """ diff --git a/logging/google/cloud/logging/handlers/transports/sync.py b/logging/google/cloud/logging/handlers/transports/sync.py deleted file mode 100644 index e87eb4885fbf..000000000000 --- a/logging/google/cloud/logging/handlers/transports/sync.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Transport for Python logging handler. 
- -Logs directly to the the Stackdriver Logging API with a synchronous call. -""" - -from google.cloud.logging import _helpers -from google.cloud.logging.handlers.transports.base import Transport - - -class SyncTransport(Transport): - """Basic sychronous transport. - - Uses this library's Logging client to directly make the API call. - """ - - def __init__(self, client, name): - self.logger = client.logger(name) - - def send( - self, record, message, resource=None, labels=None, trace=None, span_id=None - ): - """Overrides transport.send(). - - :type record: :class:`logging.LogRecord` - :param record: Python log record that the handler was called with. - - :type message: str - :param message: The message from the ``LogRecord`` after being - formatted by the associated log formatters. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry. - - :type labels: dict - :param labels: (Optional) Mapping of labels for the entry. - """ - info = {"message": message, "python_logger": record.name} - self.logger.log_struct( - info, - severity=_helpers._normalize_severity(record.levelno), - resource=resource, - labels=labels, - trace=trace, - span_id=span_id, - ) diff --git a/logging/google/cloud/logging/logger.py b/logging/google/cloud/logging/logger.py deleted file mode 100644 index b212b6e8b0c3..000000000000 --- a/logging/google/cloud/logging/logger.py +++ /dev/null @@ -1,384 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define API Loggers.""" - -from google.cloud.logging.entries import LogEntry -from google.cloud.logging.entries import ProtobufEntry -from google.cloud.logging.entries import StructEntry -from google.cloud.logging.entries import TextEntry -from google.cloud.logging.resource import Resource - - -_GLOBAL_RESOURCE = Resource(type="global", labels={}) - - -_OUTBOUND_ENTRY_FIELDS = ( # (name, default) - ("type_", None), - ("log_name", None), - ("payload", None), - ("labels", None), - ("insert_id", None), - ("severity", None), - ("http_request", None), - ("timestamp", None), - ("resource", _GLOBAL_RESOURCE), - ("trace", None), - ("span_id", None), - ("trace_sampled", None), - ("source_location", None), -) - - -class Logger(object): - """Loggers represent named targets for log entries. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs - - :type name: str - :param name: the name of the logger - - :type client: :class:`google.cloud.logging.client.Client` - :param client: A client which holds credentials and project configuration - for the logger (which requires a project). - - :type labels: dict - :param labels: (optional) mapping of default labels for entries written - via this logger. 
- """ - - def __init__(self, name, client, labels=None): - self.name = name - self._client = client - self.labels = labels - - @property - def client(self): - """Clent bound to the logger.""" - return self._client - - @property - def project(self): - """Project bound to the logger.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in logging APIs""" - return "projects/%s/logs/%s" % (self.project, self.name) - - @property - def path(self): - """URI path for use in logging APIs""" - return "/%s" % (self.full_name,) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current logger. - - :rtype: :class:`google.cloud.logging.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def batch(self, client=None): - """Return a batch to use as a context manager. - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - - :rtype: :class:`Batch` - :returns: A batch to use as a context manager. - """ - client = self._require_client(client) - return Batch(self, client) - - def _do_log(self, client, _entry_class, payload=None, **kw): - """Helper for :meth:`log_empty`, :meth:`log_text`, etc. - """ - client = self._require_client(client) - - # Apply defaults - kw["log_name"] = kw.pop("log_name", self.full_name) - kw["labels"] = kw.pop("labels", self.labels) - kw["resource"] = kw.pop("resource", _GLOBAL_RESOURCE) - - if payload is not None: - entry = _entry_class(payload=payload, **kw) - else: - entry = _entry_class(**kw) - - api_repr = entry.to_api_repr() - client.logging_api.write_entries([api_repr]) - - def log_empty(self, client=None, **kw): - """API call: log an empty message via a POST request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current logger. - - :type kw: dict - :param kw: (optional) additional keyword arguments for the entry. - See :class:`~google.cloud.logging.entries.LogEntry`. - """ - self._do_log(client, LogEntry, **kw) - - def log_text(self, text, client=None, **kw): - """API call: log a text message via a POST request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write - - :type text: str - :param text: the log message. - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current logger. - - :type kw: dict - :param kw: (optional) additional keyword arguments for the entry. - See :class:`~google.cloud.logging.entries.LogEntry`. - """ - self._do_log(client, TextEntry, text, **kw) - - def log_struct(self, info, client=None, **kw): - """API call: log a structured message via a POST request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write - - :type info: dict - :param info: the log entry information - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. 
If not passed, falls back to the - ``client`` stored on the current logger. - - :type kw: dict - :param kw: (optional) additional keyword arguments for the entry. - See :class:`~google.cloud.logging.entries.LogEntry`. - """ - self._do_log(client, StructEntry, info, **kw) - - def log_proto(self, message, client=None, **kw): - """API call: log a protobuf message via a POST request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list - - :type message: :class:`~google.protobuf.message.Message` - :param message: The protobuf message to be logged. - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current logger. - - :type kw: dict - :param kw: (optional) additional keyword arguments for the entry. - See :class:`~google.cloud.logging.entries.LogEntry`. - """ - self._do_log(client, ProtobufEntry, message, **kw) - - def delete(self, client=None): - """API call: delete all entries in a logger via a DELETE request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs/delete - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current logger. - """ - client = self._require_client(client) - client.logging_api.logger_delete(self.project, self.name) - - def list_entries( - self, - projects=None, - filter_=None, - order_by=None, - page_size=None, - page_token=None, - ): - """Return a page of log entries. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list - - :type projects: list of strings - :param projects: project IDs to include. If not passed, - defaults to the project bound to the client. - - :type filter_: str - :param filter_: - a filter expression. See - https://cloud.google.com/logging/docs/view/advanced_filters - - :type order_by: str - :param order_by: One of :data:`~google.cloud.logging.ASCENDING` - or :data:`~google.cloud.logging.DESCENDING`. - - :type page_size: int - :param page_size: - Optional. The maximum number of entries in each page of results - from this request. Non-positive values are ignored. Defaults - to a sensible value set by the API. - - :type page_token: str - :param page_token: - Optional. If present, return the next batch of entries, using - the value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property of the returned iterator instead of manually passing - the token. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of log entries accessible to the current logger. - See :class:`~google.cloud.logging.entries.LogEntry`. - """ - log_filter = "logName=%s" % (self.full_name,) - if filter_ is not None: - filter_ = "%s AND %s" % (filter_, log_filter) - else: - filter_ = log_filter - return self.client.list_entries( - projects=projects, - filter_=filter_, - order_by=order_by, - page_size=page_size, - page_token=page_token, - ) - - -class Batch(object): - """Context manager: collect entries to log via a single API call. - - Helper returned by :meth:`Logger.batch` - - :type logger: :class:`google.cloud.logging.logger.Logger` - :param logger: the logger to which entries will be logged. - - :type client: :class:`google.cloud.logging.client.Client` - :param client: The client to use. 
- - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the batch, defaults - to None, which requires that every entry should have a - resource specified. Since the methods used to write - entries default the entry's resource to the global - resource type, this parameter is only required - if explicitly set to None. If no entries' resource are - set to None, this parameter will be ignored on the server. - """ - - def __init__(self, logger, client, resource=None): - self.logger = logger - self.entries = [] - self.client = client - self.resource = resource - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is None: - self.commit() - - def log_empty(self, **kw): - """Add a entry without payload to be logged during :meth:`commit`. - - :type kw: dict - :param kw: (optional) additional keyword arguments for the entry. - See :class:`~google.cloud.logging.entries.LogEntry`. - """ - self.entries.append(LogEntry(**kw)) - - def log_text(self, text, **kw): - """Add a text entry to be logged during :meth:`commit`. - - :type text: str - :param text: the text entry - - :type kw: dict - :param kw: (optional) additional keyword arguments for the entry. - See :class:`~google.cloud.logging.entries.LogEntry`. - """ - self.entries.append(TextEntry(payload=text, **kw)) - - def log_struct(self, info, **kw): - """Add a struct entry to be logged during :meth:`commit`. - - :type info: dict - :param info: the struct entry - - :type kw: dict - :param kw: (optional) additional keyword arguments for the entry. - See :class:`~google.cloud.logging.entries.LogEntry`. - """ - self.entries.append(StructEntry(payload=info, **kw)) - - def log_proto(self, message, **kw): - """Add a protobuf entry to be logged during :meth:`commit`. - - :type message: protobuf message - :param message: the protobuf entry - - :type kw: dict - :param kw: (optional) additional keyword arguments for the entry. - See :class:`~google.cloud.logging.entries.LogEntry`. - """ - self.entries.append(ProtobufEntry(payload=message, **kw)) - - def commit(self, client=None): - """Send saved log entries as a single API call. - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. - """ - if client is None: - client = self.client - - kwargs = {"logger_name": self.logger.full_name} - - if self.resource is not None: - kwargs["resource"] = self.resource._to_dict() - - if self.logger.labels is not None: - kwargs["labels"] = self.logger.labels - - entries = [entry.to_api_repr() for entry in self.entries] - - client.logging_api.write_entries(entries, **kwargs) - del self.entries[:] diff --git a/logging/google/cloud/logging/metric.py b/logging/google/cloud/logging/metric.py deleted file mode 100644 index 3fb91bb52f0a..000000000000 --- a/logging/google/cloud/logging/metric.py +++ /dev/null @@ -1,185 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define Stackdriver Logging API Metrics.""" - -from google.cloud.exceptions import NotFound - - -class Metric(object): - """Metrics represent named filters for log entries. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics - - :type name: str - :param name: the name of the metric - - :type filter_: str - :param filter_: the advanced logs filter expression defining the entries - tracked by the metric. If not passed, the instance should - already exist, to be refreshed via :meth:`reload`. - - :type client: :class:`google.cloud.logging.client.Client` - :param client: A client which holds credentials and project configuration - for the metric (which requires a project). - - :type description: str - :param description: an optional description of the metric. - """ - - def __init__(self, name, filter_=None, client=None, description=""): - self.name = name - self._client = client - self.filter_ = filter_ - self.description = description - - @property - def client(self): - """Clent bound to the logger.""" - return self._client - - @property - def project(self): - """Project bound to the logger.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in metric APIs""" - return "projects/%s/metrics/%s" % (self.project, self.name) - - @property - def path(self): - """URL path for the metric's APIs""" - return "/%s" % (self.full_name,) - - @classmethod - def from_api_repr(cls, resource, client): - """Factory: construct a metric given its API representation - - :type resource: dict - :param resource: metric resource representation returned from the API - - :type client: :class:`google.cloud.logging.client.Client` - :param client: Client which holds credentials and project - configuration for the metric. - - :rtype: :class:`google.cloud.logging.metric.Metric` - :returns: Metric parsed from ``resource``. - """ - metric_name = resource["name"] - filter_ = resource["filter"] - description = resource.get("description", "") - return cls(metric_name, filter_, client=client, description=description) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current metric. - - :rtype: :class:`google.cloud.logging.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def create(self, client=None): - """API call: create the metric via a PUT request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current metric. - """ - client = self._require_client(client) - client.metrics_api.metric_create( - self.project, self.name, self.filter_, self.description - ) - - def exists(self, client=None): - """API call: test for the existence of the metric via a GET request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. 
If not passed, falls back to the - ``client`` stored on the current metric. - - :rtype: bool - :returns: Boolean indicating existence of the metric. - """ - client = self._require_client(client) - - try: - client.metrics_api.metric_get(self.project, self.name) - except NotFound: - return False - else: - return True - - def reload(self, client=None): - """API call: sync local metric configuration via a GET request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current metric. - """ - client = self._require_client(client) - data = client.metrics_api.metric_get(self.project, self.name) - self.description = data.get("description", "") - self.filter_ = data["filter"] - - def update(self, client=None): - """API call: update metric configuration via a PUT request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current metric. - """ - client = self._require_client(client) - client.metrics_api.metric_update( - self.project, self.name, self.filter_, self.description - ) - - def delete(self, client=None): - """API call: delete a metric via a DELETE request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/delete - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current metric. - """ - client = self._require_client(client) - client.metrics_api.metric_delete(self.project, self.name) diff --git a/logging/google/cloud/logging/resource.py b/logging/google/cloud/logging/resource.py deleted file mode 100644 index dda59ca09f61..000000000000 --- a/logging/google/cloud/logging/resource.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Monitored Resource for the Google Logging API V2.""" - -import collections - - -class Resource(collections.namedtuple("Resource", "type labels")): - """A monitored resource identified by specifying values for all labels. - - :type type: str - :param type: The resource type name. - - :type labels: dict - :param labels: A mapping from label names to values for all labels - enumerated in the associated :class:`ResourceDescriptor`. - """ - - __slots__ = () - - @classmethod - def _from_dict(cls, info): - """Construct a resource object from the parsed JSON representation. - - :type info: dict - :param info: - A ``dict`` parsed from the JSON wire-format representation. - - :rtype: :class:`Resource` - :returns: A resource object. 
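Similarly, a hedged usage sketch of the Metric class whose deletion closes above. client.metric() is assumed from the pre-split library (it does not appear in this patch), and the metric name, filter, and description are placeholders.

# Hedged sketch: managing a logs-based metric with the removed Metric class.
from google.cloud import logging

client = logging.Client()
metric = client.metric(              # assumed Metric factory
    "error-count",                   # illustrative metric name
    filter_="severity>=ERROR",
    description="Count of error-level entries",
)
if not metric.exists():
    metric.create()
metric.reload()                      # sync description and filter from the API
metric.delete()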
- """ - return cls(type=info["type"], labels=info.get("labels", {})) - - def _to_dict(self): - """Build a dictionary ready to be serialized to the JSON format. - - :rtype: dict - :returns: A dict representation of the object that can be written to - the API. - """ - return {"type": self.type, "labels": self.labels} diff --git a/logging/google/cloud/logging/sink.py b/logging/google/cloud/logging/sink.py deleted file mode 100644 index 2a7d46fdbb81..000000000000 --- a/logging/google/cloud/logging/sink.py +++ /dev/null @@ -1,220 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define Stackdriver Logging API Sinks.""" - -from google.cloud.exceptions import NotFound - - -class Sink(object): - """Sinks represent filtered exports for log entries. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks - - :type name: str - :param name: the name of the sink - - :type filter_: str - :param filter_: (optional) the advanced logs filter expression defining - the entries exported by the sink. - - :type destination: str - :param destination: destination URI for the entries exported by the sink. - If not passed, the instance should already exist, to - be refreshed via :meth:`reload`. - - :type client: :class:`google.cloud.logging.client.Client` - :param client: A client which holds credentials and project configuration - for the sink (which requires a project). - """ - - def __init__(self, name, filter_=None, destination=None, client=None): - self.name = name - self.filter_ = filter_ - self.destination = destination - self._client = client - self._writer_identity = None - - @property - def client(self): - """Client bound to the sink.""" - return self._client - - @property - def project(self): - """Project bound to the sink.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in sink APIs""" - return "projects/%s/sinks/%s" % (self.project, self.name) - - @property - def path(self): - """URL path for the sink's APIs""" - return "/%s" % (self.full_name) - - @property - def writer_identity(self): - """Identity used for exports via the sink""" - return self._writer_identity - - def _update_from_api_repr(self, resource): - """Helper for API methods returning sink resources.""" - self.destination = resource["destination"] - self.filter_ = resource.get("filter") - self._writer_identity = resource.get("writerIdentity") - - @classmethod - def from_api_repr(cls, resource, client): - """Factory: construct a sink given its API representation - - :type resource: dict - :param resource: sink resource representation returned from the API - - :type client: :class:`google.cloud.logging.client.Client` - :param client: Client which holds credentials and project - configuration for the sink. - - :rtype: :class:`google.cloud.logging.sink.Sink` - :returns: Sink parsed from ``resource``. 
- :raises: :class:`ValueError` if ``client`` is not ``None`` and the - project from the resource does not agree with the project - from the client. - """ - sink_name = resource["name"] - instance = cls(sink_name, client=client) - instance._update_from_api_repr(resource) - return instance - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current sink. - - :rtype: :class:`google.cloud.logging.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def create(self, client=None, unique_writer_identity=False): - """API call: create the sink via a PUT request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current sink. - - :type unique_writer_identity: bool - :param unique_writer_identity: (Optional) determines the kind of - IAM identity returned as - writer_identity in the new sink. - """ - client = self._require_client(client) - resource = client.sinks_api.sink_create( - self.project, - self.name, - self.filter_, - self.destination, - unique_writer_identity=unique_writer_identity, - ) - self._update_from_api_repr(resource) - - def exists(self, client=None): - """API call: test for the existence of the sink via a GET request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current sink. - - :rtype: bool - :returns: Boolean indicating existence of the sink. - """ - client = self._require_client(client) - - try: - client.sinks_api.sink_get(self.project, self.name) - except NotFound: - return False - else: - return True - - def reload(self, client=None): - """API call: sync local sink configuration via a GET request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current sink. - """ - client = self._require_client(client) - resource = client.sinks_api.sink_get(self.project, self.name) - self._update_from_api_repr(resource) - - def update(self, client=None, unique_writer_identity=False): - """API call: update sink configuration via a PUT request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/update - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current sink. - - :type unique_writer_identity: bool - :param unique_writer_identity: (Optional) determines the kind of - IAM identity returned as - writer_identity in the new sink. 
- """ - client = self._require_client(client) - resource = client.sinks_api.sink_update( - self.project, - self.name, - self.filter_, - self.destination, - unique_writer_identity=unique_writer_identity, - ) - self._update_from_api_repr(resource) - - def delete(self, client=None): - """API call: delete a sink via a DELETE request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/delete - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current sink. - """ - client = self._require_client(client) - client.sinks_api.sink_delete(self.project, self.name) diff --git a/logging/google/cloud/logging_v2/__init__.py b/logging/google/cloud/logging_v2/__init__.py deleted file mode 100644 index 964c99572fd6..000000000000 --- a/logging/google/cloud/logging_v2/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -from google.cloud.logging_v2 import types -from google.cloud.logging_v2.gapic import config_service_v2_client -from google.cloud.logging_v2.gapic import enums -from google.cloud.logging_v2.gapic import logging_service_v2_client -from google.cloud.logging_v2.gapic import metrics_service_v2_client - - -class LoggingServiceV2Client(logging_service_v2_client.LoggingServiceV2Client): - __doc__ = logging_service_v2_client.LoggingServiceV2Client.__doc__ - enums = enums - - -class ConfigServiceV2Client(config_service_v2_client.ConfigServiceV2Client): - __doc__ = config_service_v2_client.ConfigServiceV2Client.__doc__ - enums = enums - - -class MetricsServiceV2Client(metrics_service_v2_client.MetricsServiceV2Client): - __doc__ = metrics_service_v2_client.MetricsServiceV2Client.__doc__ - enums = enums - - -__all__ = ( - "enums", - "types", - "LoggingServiceV2Client", - "ConfigServiceV2Client", - "MetricsServiceV2Client", -) diff --git a/logging/google/cloud/logging_v2/gapic/__init__.py b/logging/google/cloud/logging_v2/gapic/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/logging/google/cloud/logging_v2/gapic/config_service_v2_client.py deleted file mode 100644 index 521b2e304f84..000000000000 --- a/logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ /dev/null @@ -1,1297 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.logging.v2 ConfigServiceV2 API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.cloud.logging_v2.gapic import config_service_v2_client_config -from google.cloud.logging_v2.gapic import enums -from google.cloud.logging_v2.gapic.transports import config_service_v2_grpc_transport -from google.cloud.logging_v2.proto import logging_config_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2_grpc -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version - - -class ConfigServiceV2Client(object): - """Service for configuring sinks used to route log entries.""" - - SERVICE_ADDRESS = "logging.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.logging.v2.ConfigServiceV2" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ConfigServiceV2Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def billing_path(cls, billing_account): - """DEPRECATED. Return a fully-qualified billing string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account, - ) - - @classmethod - def billing_exclusion_path(cls, billing_account, exclusion): - """DEPRECATED. Return a fully-qualified billing_exclusion string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "billingAccounts/{billing_account}/exclusions/{exclusion}", - billing_account=billing_account, - exclusion=exclusion, - ) - - @classmethod - def billing_sink_path(cls, billing_account, sink): - """DEPRECATED. Return a fully-qualified billing_sink string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "billingAccounts/{billing_account}/sinks/{sink}", - billing_account=billing_account, - sink=sink, - ) - - @classmethod - def exclusion_path(cls, project, exclusion): - """DEPRECATED. 
Return a fully-qualified exclusion string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "projects/{project}/exclusions/{exclusion}", - project=project, - exclusion=exclusion, - ) - - @classmethod - def folder_path(cls, folder): - """DEPRECATED. Return a fully-qualified folder string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand("folders/{folder}", folder=folder,) - - @classmethod - def folder_exclusion_path(cls, folder, exclusion): - """DEPRECATED. Return a fully-qualified folder_exclusion string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "folders/{folder}/exclusions/{exclusion}", - folder=folder, - exclusion=exclusion, - ) - - @classmethod - def folder_sink_path(cls, folder, sink): - """DEPRECATED. Return a fully-qualified folder_sink string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "folders/{folder}/sinks/{sink}", folder=folder, sink=sink, - ) - - @classmethod - def organization_path(cls, organization): - """DEPRECATED. Return a fully-qualified organization string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization, - ) - - @classmethod - def organization_exclusion_path(cls, organization, exclusion): - """DEPRECATED. Return a fully-qualified organization_exclusion string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "organizations/{organization}/exclusions/{exclusion}", - organization=organization, - exclusion=exclusion, - ) - - @classmethod - def organization_sink_path(cls, organization, sink): - """DEPRECATED. Return a fully-qualified organization_sink string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "organizations/{organization}/sinks/{sink}", - organization=organization, - sink=sink, - ) - - @classmethod - def project_path(cls, project): - """DEPRECATED. Return a fully-qualified project string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - @classmethod - def sink_path(cls, project, sink): - """DEPRECATED. Return a fully-qualified sink string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "projects/{project}/sinks/{sink}", project=project, sink=sink, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. 
- - Args: - transport (Union[~.ConfigServiceV2GrpcTransport, - Callable[[~.Credentials, type], ~.ConfigServiceV2GrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = config_service_v2_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) 
- self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def list_sinks( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists sinks. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_sinks(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_sinks(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The parent resource whose sinks are to be listed: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.logging_v2.types.LogSink` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_sinks" not in self._inner_api_calls: - self._inner_api_calls[ - "list_sinks" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_sinks, - default_retry=self._method_configs["ListSinks"].retry, - default_timeout=self._method_configs["ListSinks"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.ListSinksRequest( - parent=parent, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_sinks"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="sinks", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_sink( - self, - sink_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a sink. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> sink_name = client.sink_path('[PROJECT]', '[SINK]') - >>> - >>> response = client.get_sink(sink_name) - - Args: - sink_name (str): Required. The resource name of the sink: - - :: - - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" - - Example: ``"projects/my-project-id/sinks/my-sink-id"``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogSink` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_sink" not in self._inner_api_calls: - self._inner_api_calls[ - "get_sink" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_sink, - default_retry=self._method_configs["GetSink"].retry, - default_timeout=self._method_configs["GetSink"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.GetSinkRequest(sink_name=sink_name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("sink_name", sink_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_sink"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_sink( - self, - parent, - sink, - unique_writer_identity=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a sink that exports specified log entries to a destination. The - export of newly-ingested log entries begins immediately, unless the - sink's ``writer_identity`` is not permitted to write to the destination. - A sink can export log entries only from the resource owning the sink. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `sink`: - >>> sink = {} - >>> - >>> response = client.create_sink(parent, sink) - - Args: - parent (str): Required. The resource in which to create the sink: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. - sink (Union[dict, ~google.cloud.logging_v2.types.LogSink]): Required. The new sink, whose ``name`` parameter is a sink identifier - that is not already in use. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.LogSink` - unique_writer_identity (bool): Optional. Determines the kind of IAM identity returned as - ``writer_identity`` in the new sink. If this value is omitted or set to - false, and if the sink's parent is a project, then the value returned as - ``writer_identity`` is the same group or service account used by Logging - before the addition of writer identities to this API. The sink's - destination must be in the same project as the sink itself. - - If this field is set to true, or if the sink is owned by a non-project - resource such as an organization, then the value of ``writer_identity`` - will be a unique service account used only for exports from the new - sink. For more information, see ``writer_identity`` in ``LogSink``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogSink` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_sink" not in self._inner_api_calls: - self._inner_api_calls[ - "create_sink" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_sink, - default_retry=self._method_configs["CreateSink"].retry, - default_timeout=self._method_configs["CreateSink"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.CreateSinkRequest( - parent=parent, sink=sink, unique_writer_identity=unique_writer_identity, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_sink"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_sink( - self, - sink_name, - sink, - unique_writer_identity=None, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a sink. This method replaces the following fields in the - existing sink with values from the new sink: ``destination``, and - ``filter``. - - The updated sink might also have a new ``writer_identity``; see the - ``unique_writer_identity`` field. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> sink_name = client.sink_path('[PROJECT]', '[SINK]') - >>> - >>> # TODO: Initialize `sink`: - >>> sink = {} - >>> - >>> response = client.update_sink(sink_name, sink) - - Args: - sink_name (str): Required. The full resource name of the sink to update, including the - parent resource and the sink identifier: - - :: - - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" - - Example: ``"projects/my-project-id/sinks/my-sink-id"``. - sink (Union[dict, ~google.cloud.logging_v2.types.LogSink]): Required. The updated sink, whose name is the same identifier that - appears as part of ``sink_name``. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.LogSink` - unique_writer_identity (bool): Optional. See ``sinks.create`` for a description of this field. When - updating a sink, the effect of this field on the value of - ``writer_identity`` in the updated sink depends on both the old and new - values of this field: - - - If the old and new values of this field are both false or both true, - then there is no change to the sink's ``writer_identity``. - - If the old value is false and the new value is true, then - ``writer_identity`` is changed to a unique service account. - - It is an error if the old value is true and the new value is set to - false or defaulted to false. - update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Optional. Field mask that specifies the fields in ``sink`` that need an - update. A sink field will be overwritten if, and only if, it is in the - update mask. ``name`` and output only fields cannot be updated. 
- - An empty updateMask is temporarily treated as using the following mask - for backwards compatibility purposes: destination,filter,includeChildren - At some point in the future, behavior will be removed and specifying an - empty updateMask will be an error. - - For a detailed ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - - Example: ``updateMask=filter``. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogSink` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_sink" not in self._inner_api_calls: - self._inner_api_calls[ - "update_sink" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_sink, - default_retry=self._method_configs["UpdateSink"].retry, - default_timeout=self._method_configs["UpdateSink"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.UpdateSinkRequest( - sink_name=sink_name, - sink=sink, - unique_writer_identity=unique_writer_identity, - update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("sink_name", sink_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_sink"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_sink( - self, - sink_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a sink. If the sink has a unique ``writer_identity``, then that - service account is also deleted. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> sink_name = client.sink_path('[PROJECT]', '[SINK]') - >>> - >>> client.delete_sink(sink_name) - - Args: - sink_name (str): Required. The full resource name of the sink to delete, including the - parent resource and the sink identifier: - - :: - - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" - - Example: ``"projects/my-project-id/sinks/my-sink-id"``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_sink" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_sink" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_sink, - default_retry=self._method_configs["DeleteSink"].retry, - default_timeout=self._method_configs["DeleteSink"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("sink_name", sink_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_sink"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_exclusions( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists all the exclusions in a parent resource. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_exclusions(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_exclusions(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The parent resource whose exclusions are to be listed. - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.logging_v2.types.LogExclusion` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. 
- ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_exclusions" not in self._inner_api_calls: - self._inner_api_calls[ - "list_exclusions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_exclusions, - default_retry=self._method_configs["ListExclusions"].retry, - default_timeout=self._method_configs["ListExclusions"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.ListExclusionsRequest( - parent=parent, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_exclusions"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="exclusions", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_exclusion( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the description of an exclusion. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') - >>> - >>> response = client.get_exclusion(name) - - Args: - name (str): Required. The resource name of an existing exclusion: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogExclusion` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_exclusion" not in self._inner_api_calls: - self._inner_api_calls[ - "get_exclusion" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_exclusion, - default_retry=self._method_configs["GetExclusion"].retry, - default_timeout=self._method_configs["GetExclusion"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.GetExclusionRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_exclusion"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_exclusion( - self, - parent, - exclusion, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a new exclusion in a specified parent resource. - Only log entries belonging to that resource can be excluded. - You can have up to 10 exclusions in a resource. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `exclusion`: - >>> exclusion = {} - >>> - >>> response = client.create_exclusion(parent, exclusion) - - Args: - parent (str): Required. The parent resource in which to create the exclusion: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. - exclusion (Union[dict, ~google.cloud.logging_v2.types.LogExclusion]): Required. The new exclusion, whose ``name`` parameter is an exclusion - name that is not already used in the parent resource. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.LogExclusion` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogExclusion` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_exclusion" not in self._inner_api_calls: - self._inner_api_calls[ - "create_exclusion" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_exclusion, - default_retry=self._method_configs["CreateExclusion"].retry, - default_timeout=self._method_configs["CreateExclusion"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.CreateExclusionRequest( - parent=parent, exclusion=exclusion, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_exclusion"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_exclusion( - self, - name, - exclusion, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Changes one or more properties of an existing exclusion. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') - >>> - >>> # TODO: Initialize `exclusion`: - >>> exclusion = {} - >>> - >>> # TODO: Initialize `update_mask`: - >>> update_mask = {} - >>> - >>> response = client.update_exclusion(name, exclusion, update_mask) - - Args: - name (str): Required. The resource name of the exclusion to update: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. - exclusion (Union[dict, ~google.cloud.logging_v2.types.LogExclusion]): Required. New values for the existing exclusion. Only the fields - specified in ``update_mask`` are relevant. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.LogExclusion` - update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Required. A non-empty list of fields to change in the existing - exclusion. New values for the fields are taken from the corresponding - fields in the ``LogExclusion`` included in this request. Fields not - mentioned in ``update_mask`` are not changed and are ignored in the - request. - - For example, to change the filter and description of an exclusion, - specify an ``update_mask`` of ``"filter,description"``. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogExclusion` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_exclusion" not in self._inner_api_calls: - self._inner_api_calls[ - "update_exclusion" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_exclusion, - default_retry=self._method_configs["UpdateExclusion"].retry, - default_timeout=self._method_configs["UpdateExclusion"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.UpdateExclusionRequest( - name=name, exclusion=exclusion, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_exclusion"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_exclusion( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes an exclusion. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') - >>> - >>> client.delete_exclusion(name) - - Args: - name (str): Required. The resource name of an existing exclusion to delete: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_exclusion" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_exclusion" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_exclusion, - default_retry=self._method_configs["DeleteExclusion"].retry, - default_timeout=self._method_configs["DeleteExclusion"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.DeleteExclusionRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_exclusion"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py b/logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py deleted file mode 100644 index b7c00db4cd93..000000000000 --- a/logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py +++ /dev/null @@ -1,82 +0,0 @@ -config = { - "interfaces": { - "google.logging.v2.ConfigServiceV2": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - }, - "write_sink": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - }, - }, - "methods": { - "ListSinks": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetSink": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateSink": { - "timeout_millis": 120000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateSink": { - "timeout_millis": 120000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteSink": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListExclusions": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetExclusion": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateExclusion": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateExclusion": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DeleteExclusion": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/logging/google/cloud/logging_v2/gapic/enums.py b/logging/google/cloud/logging_v2/gapic/enums.py deleted file mode 100644 index e677017ccbd2..000000000000 --- a/logging/google/cloud/logging_v2/gapic/enums.py +++ /dev/null @@ -1,215 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use 
this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class LaunchStage(enum.IntEnum): - """ - The launch stage as defined by `Google Cloud Platform Launch - Stages `__. - - Attributes: - LAUNCH_STAGE_UNSPECIFIED (int): Do not use this default value. - EARLY_ACCESS (int): Early Access features are limited to a closed group of testers. To use - these features, you must sign up in advance and sign a Trusted Tester - agreement (which includes confidentiality provisions). These features may - be unstable, changed in backward-incompatible ways, and are not - guaranteed to be released. - ALPHA (int): Alpha is a limited availability test for releases before they are cleared - for widespread use. By Alpha, all significant design issues are resolved - and we are in the process of verifying functionality. Alpha customers - need to apply for access, agree to applicable terms, and have their - projects whitelisted. Alpha releases don’t have to be feature complete, - no SLAs are provided, and there are no technical support obligations, but - they will be far enough along that customers can actually use them in - test environments or for limited-use tests -- just like they would in - normal production cases. - BETA (int): Beta is the point at which we are ready to open a release for any - customer to use. There are no SLA or technical support obligations in a - Beta release. Products will be complete from a feature perspective, but - may have some open outstanding issues. Beta releases are suitable for - limited production use cases. - GA (int): GA features are open to all developers and are considered stable and - fully qualified for production use. - DEPRECATED (int): Deprecated features are scheduled to be shut down and removed. For more - information, see the “Deprecation Policy” section of our `Terms of - Service `__ and the `Google Cloud - Platform Subject to the Deprecation - Policy `__ documentation. - """ - - LAUNCH_STAGE_UNSPECIFIED = 0 - EARLY_ACCESS = 1 - ALPHA = 2 - BETA = 3 - GA = 4 - DEPRECATED = 5 - - -class LogSeverity(enum.IntEnum): - """ - The severity of the event described in a log entry, expressed as one of - the standard severity levels listed below. For your reference, the - levels are assigned the listed numeric values. The effect of using - numeric values other than those listed is undefined. - - You can filter for log entries by severity. For example, the following - filter expression will match log entries with severities ``INFO``, - ``NOTICE``, and ``WARNING``: - - :: - - severity > DEBUG AND severity <= WARNING - - If you are writing log entries, you should map other severity encodings - to one of these standard levels. For example, you might map all of - Java's FINE, FINER, and FINEST levels to ``LogSeverity.DEBUG``. You can - preserve the original severity level in the log entry payload if you - wish. - - Attributes: - DEFAULT (int): (0) The log entry has no assigned severity level. - DEBUG (int): (100) Debug or trace information. 
- INFO (int): (200) Routine information, such as ongoing status or performance. - NOTICE (int): (300) Normal but significant events, such as start up, shut down, or - a configuration change. - WARNING (int): (400) Warning events might cause problems. - ERROR (int): (500) Error events are likely to cause problems. - CRITICAL (int): (600) Critical events cause more severe problems or outages. - ALERT (int): (700) A person must take an action immediately. - EMERGENCY (int): (800) One or more systems are unusable. - """ - - DEFAULT = 0 - DEBUG = 100 - INFO = 200 - NOTICE = 300 - WARNING = 400 - ERROR = 500 - CRITICAL = 600 - ALERT = 700 - EMERGENCY = 800 - - -class NullValue(enum.IntEnum): - """ - ``NullValue`` is a singleton enumeration to represent the null value for - the ``Value`` type union. - - The JSON representation for ``NullValue`` is JSON ``null``. - - Attributes: - NULL_VALUE (int): Null value. - """ - - NULL_VALUE = 0 - - -class LabelDescriptor(object): - class ValueType(enum.IntEnum): - """ - Value types that can be used as label values. - - Attributes: - STRING (int): A variable-length string. This is the default. - BOOL (int): Boolean; true or false. - INT64 (int): A 64-bit signed integer. - """ - - STRING = 0 - BOOL = 1 - INT64 = 2 - - -class LogMetric(object): - class ApiVersion(enum.IntEnum): - """ - Logging API version. - - Attributes: - V2 (int): Logging API v2. - V1 (int): Logging API v1. - """ - - V2 = 0 - V1 = 1 - - -class LogSink(object): - class VersionFormat(enum.IntEnum): - """ - Available log entry formats. Log entries can be written to - Logging in either format and can be exported in either format. - Version 2 is the preferred format. - - Attributes: - VERSION_FORMAT_UNSPECIFIED (int): An unspecified format version that will default to V2. - V2 (int): ``LogEntry`` version 2 format. - V1 (int): ``LogEntry`` version 1 format. - """ - - VERSION_FORMAT_UNSPECIFIED = 0 - V2 = 1 - V1 = 2 - - -class MetricDescriptor(object): - class MetricKind(enum.IntEnum): - """ - The kind of measurement. It describes how the data is reported. - - Attributes: - METRIC_KIND_UNSPECIFIED (int): Do not use this default value. - GAUGE (int): An instantaneous measurement of a value. - DELTA (int): The change in a value during a time interval. - CUMULATIVE (int): A value accumulated over a time interval. Cumulative - measurements in a time series should have the same start time - and increasing end times, until an event resets the cumulative - value to zero and sets a new start time for the following - points. - """ - - METRIC_KIND_UNSPECIFIED = 0 - GAUGE = 1 - DELTA = 2 - CUMULATIVE = 3 - - class ValueType(enum.IntEnum): - """ - The value type of a metric. - - Attributes: - VALUE_TYPE_UNSPECIFIED (int): Do not use this default value. - BOOL (int): The value is a boolean. This value type can be used only if the metric - kind is ``GAUGE``. - INT64 (int): The value is a signed 64-bit integer. - DOUBLE (int): The value is a double precision floating point number. - STRING (int): The value is a text string. This value type can be used only if the - metric kind is ``GAUGE``. - DISTRIBUTION (int): The value is a ``Distribution``. - MONEY (int): The value is money. 
- """ - - VALUE_TYPE_UNSPECIFIED = 0 - BOOL = 1 - INT64 = 2 - DOUBLE = 3 - STRING = 4 - DISTRIBUTION = 5 - MONEY = 6 diff --git a/logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py deleted file mode 100644 index 072c4ebbbb7c..000000000000 --- a/logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ /dev/null @@ -1,846 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.logging.v2 LoggingServiceV2 API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.api import monitored_resource_pb2 -from google.cloud.logging_v2.gapic import enums -from google.cloud.logging_v2.gapic import logging_service_v2_client_config -from google.cloud.logging_v2.gapic.transports import logging_service_v2_grpc_transport -from google.cloud.logging_v2.proto import log_entry_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2_grpc -from google.cloud.logging_v2.proto import logging_pb2 -from google.cloud.logging_v2.proto import logging_pb2_grpc -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version - - -class LoggingServiceV2Client(object): - """Service for ingesting and querying logs.""" - - SERVICE_ADDRESS = "logging.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.logging.v2.LoggingServiceV2" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LoggingServiceV2Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def billing_path(cls, billing_account): - """DEPRECATED. 
Return a fully-qualified billing string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account, - ) - - @classmethod - def billing_log_path(cls, billing_account, log): - """DEPRECATED. Return a fully-qualified billing_log string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "billingAccounts/{billing_account}/logs/{log}", - billing_account=billing_account, - log=log, - ) - - @classmethod - def folder_path(cls, folder): - """DEPRECATED. Return a fully-qualified folder string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand("folders/{folder}", folder=folder,) - - @classmethod - def folder_log_path(cls, folder, log): - """DEPRECATED. Return a fully-qualified folder_log string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "folders/{folder}/logs/{log}", folder=folder, log=log, - ) - - @classmethod - def log_path(cls, project, log): - """DEPRECATED. Return a fully-qualified log string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "projects/{project}/logs/{log}", project=project, log=log, - ) - - @classmethod - def organization_path(cls, organization): - """DEPRECATED. Return a fully-qualified organization string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization, - ) - - @classmethod - def organization_log_path(cls, organization, log): - """DEPRECATED. Return a fully-qualified organization_log string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "organizations/{organization}/logs/{log}", - organization=organization, - log=log, - ) - - @classmethod - def project_path(cls, project): - """DEPRECATED. Return a fully-qualified project string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.LoggingServiceV2GrpcTransport, - Callable[[~.Credentials, type], ~.LoggingServiceV2GrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. 
This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = logging_service_v2_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def delete_log( - self, - log_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes all the log entries in a log. 
- The log reappears if it receives new entries. - Log entries written shortly before the delete operation might not be - deleted. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.LoggingServiceV2Client() - >>> - >>> log_name = client.log_path('[PROJECT]', '[LOG]') - >>> - >>> client.delete_log(log_name) - - Args: - log_name (str): Required. The resource name of the log to delete: - - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" - - ``[LOG_ID]`` must be URL-encoded. For example, - ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. - For more information about log names, see ``LogEntry``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_log" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_log" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_log, - default_retry=self._method_configs["DeleteLog"].retry, - default_timeout=self._method_configs["DeleteLog"].timeout, - client_info=self._client_info, - ) - - request = logging_pb2.DeleteLogRequest(log_name=log_name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("log_name", log_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_log"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def write_log_entries( - self, - entries, - log_name=None, - resource=None, - labels=None, - partial_success=None, - dry_run=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Writes log entries to Logging. This API method is the - only way to send log entries to Logging. This method - is used, directly or indirectly, by the Logging agent - (fluentd) and all logging libraries configured to use Logging. - A single request may contain log entries for a maximum of 1000 - different resources (projects, organizations, billing accounts or - folders) - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.LoggingServiceV2Client() - >>> - >>> # TODO: Initialize `entries`: - >>> entries = [] - >>> - >>> response = client.write_log_entries(entries) - - Args: - entries (list[Union[dict, ~google.cloud.logging_v2.types.LogEntry]]): Required. The log entries to send to Logging. The order of log entries - in this list does not matter. 
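A minimal sketch of constructing the client and calling ``delete_log`` as documented above, assuming the 1.x surface removed here; the endpoint override is optional and the project and log IDs are placeholders.

    from google.cloud import logging_v2

    # Default construction picks up credentials from the environment.
    client = logging_v2.LoggingServiceV2Client()

    # The endpoint can be overridden through client_options when needed:
    # client = logging_v2.LoggingServiceV2Client(
    #     client_options={"api_endpoint": "logging.googleapis.com:443"}
    # )

    # Deleting a log removes its entries; the log reappears on new writes.
    log_name = client.log_path("my-project-id", "syslog")
    client.delete_log(log_name)
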
Values supplied in this method's - ``log_name``, ``resource``, and ``labels`` fields are copied into those - log entries in this list that do not include values for their - corresponding fields. For more information, see the ``LogEntry`` type. - - If the ``timestamp`` or ``insert_id`` fields are missing in log entries, - then this method supplies the current time or a unique identifier, - respectively. The supplied values are chosen so that, among the log - entries that did not supply their own values, the entries earlier in the - list will sort before the entries later in the list. See the - ``entries.list`` method. - - Log entries with timestamps that are more than the `logs retention - period `__ in the past or - more than 24 hours in the future will not be available when calling - ``entries.list``. However, those log entries can still be `exported with - LogSinks `__. - - To improve throughput and to avoid exceeding the `quota - limit `__ for calls to - ``entries.write``, you should try to include several log entries in this - list, rather than calling this method for each individual log entry. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.LogEntry` - log_name (str): Optional. A default log resource name that is assigned to all log - entries in ``entries`` that do not specify a value for ``log_name``: - - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" - - ``[LOG_ID]`` must be URL-encoded. For example: - - :: - - "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" - - The permission logging.logEntries.create is needed on each project, - organization, billing account, or folder that is receiving new log - entries, whether the resource is specified in logName or in an - individual log entry. - resource (Union[dict, ~google.cloud.logging_v2.types.MonitoredResource]): Optional. A default monitored resource object that is assigned to all - log entries in ``entries`` that do not specify a value for ``resource``. - Example: - - :: - - { "type": "gce_instance", - "labels": { - "zone": "us-central1-a", "instance_id": "00000000000000000000" }} - - See ``LogEntry``. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.MonitoredResource` - labels (dict[str -> str]): Optional. Default labels that are added to the ``labels`` field of all - log entries in ``entries``. If a log entry already has a label with the - same key as a label in this parameter, then the log entry's label is not - changed. See ``LogEntry``. - partial_success (bool): Optional. Whether valid entries should be written even if some other - entries fail due to INVALID\_ARGUMENT or PERMISSION\_DENIED errors. If - any entry is not written, then the response status is the error - associated with one of the failed entries and the response includes - error details keyed by the entries' zero-based index in the - ``entries.write`` method. - dry_run (bool): Optional. If true, the request should expect normal response, but the - entries won't be persisted nor exported. Useful for checking whether the - logging API endpoints are working properly before sending valuable data. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.WriteLogEntriesResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "write_log_entries" not in self._inner_api_calls: - self._inner_api_calls[ - "write_log_entries" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.write_log_entries, - default_retry=self._method_configs["WriteLogEntries"].retry, - default_timeout=self._method_configs["WriteLogEntries"].timeout, - client_info=self._client_info, - ) - - request = logging_pb2.WriteLogEntriesRequest( - entries=entries, - log_name=log_name, - resource=resource, - labels=labels, - partial_success=partial_success, - dry_run=dry_run, - ) - return self._inner_api_calls["write_log_entries"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_log_entries( - self, - resource_names, - project_ids=None, - filter_=None, - order_by=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists log entries. Use this method to retrieve log entries that - originated from a project/folder/organization/billing account. For ways - to export log entries, see `Exporting - Logs `__. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.LoggingServiceV2Client() - >>> - >>> # TODO: Initialize `resource_names`: - >>> resource_names = [] - >>> - >>> # Iterate over all results - >>> for element in client.list_log_entries(resource_names): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_log_entries(resource_names).pages: - ... for element in page: - ... # process element - ... pass - - Args: - resource_names (list[str]): Required. Names of one or more parent resources from which to retrieve - log entries: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - Projects listed in the ``project_ids`` field are added to this list. - project_ids (list[str]): Deprecated. Use ``resource_names`` instead. One or more project - identifiers or project numbers from which to retrieve log entries. - Example: ``"my-project-1A"``. - filter_ (str): Optional. A filter that chooses which log entries to return. See - `Advanced Logs - Filters `__. - Only log entries that match the filter are returned. An empty filter - matches all log entries in the resources listed in ``resource_names``. - Referencing a parent resource that is not listed in ``resource_names`` - will cause the filter to return no results. The maximum length of the - filter is 20000 characters. - order_by (str): Optional. How the results should be sorted. 
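One plausible ``write_log_entries`` call based on the parameter descriptions above; the payloads, labels, and monitored resource are placeholders, and dicts stand in for the LogEntry and MonitoredResource messages as the docstring allows.

    from google.cloud import logging_v2
    from google.cloud.logging_v2.gapic import enums

    client = logging_v2.LoggingServiceV2Client()

    log_name = client.log_path("my-project-id", "my-app")
    resource = {"type": "global", "labels": {"project_id": "my-project-id"}}

    entries = [
        {"text_payload": "application started"},
        {"text_payload": "cache miss rate high",
         "severity": enums.LogSeverity.WARNING},
    ]

    # log_name and resource are copied into entries that do not set them;
    # partial_success lets valid entries through even if others fail.
    response = client.write_log_entries(
        entries, log_name=log_name, resource=resource, partial_success=True
    )
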
Presently, the only - permitted values are ``"timestamp asc"`` (default) and - ``"timestamp desc"``. The first option returns entries in order of - increasing values of ``LogEntry.timestamp`` (oldest first), and the - second option returns entries in order of decreasing timestamps (newest - first). Entries with equal timestamps are returned in order of their - ``insert_id`` values. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.logging_v2.types.LogEntry` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_log_entries" not in self._inner_api_calls: - self._inner_api_calls[ - "list_log_entries" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_log_entries, - default_retry=self._method_configs["ListLogEntries"].retry, - default_timeout=self._method_configs["ListLogEntries"].timeout, - client_info=self._client_info, - ) - - request = logging_pb2.ListLogEntriesRequest( - resource_names=resource_names, - project_ids=project_ids, - filter=filter_, - order_by=order_by, - page_size=page_size, - ) - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_log_entries"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="entries", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def list_monitored_resource_descriptors( - self, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the descriptors for monitored resource types used by Logging. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.LoggingServiceV2Client() - >>> - >>> # Iterate over all results - >>> for element in client.list_monitored_resource_descriptors(): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_monitored_resource_descriptors().pages: - ... for element in page: - ... # process element - ... pass - - Args: - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. 
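A sketch of the paging pattern described above for ``list_log_entries``; the filter string and resource name are illustrative only.

    from google.cloud import logging_v2

    client = logging_v2.LoggingServiceV2Client()

    resource_names = ["projects/my-project-id"]
    # Only entries matching the filter are returned, newest first here.
    filter_ = 'severity>=ERROR AND timestamp>="2020-01-01T00:00:00Z"'

    iterator = client.list_log_entries(
        resource_names, filter_=filter_, order_by="timestamp desc", page_size=100
    )

    # The iterator fetches additional pages transparently.
    for entry in iterator:
        print(entry.log_name, entry.timestamp)
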
If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.logging_v2.types.MonitoredResourceDescriptor` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_monitored_resource_descriptors" not in self._inner_api_calls: - self._inner_api_calls[ - "list_monitored_resource_descriptors" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_monitored_resource_descriptors, - default_retry=self._method_configs[ - "ListMonitoredResourceDescriptors" - ].retry, - default_timeout=self._method_configs[ - "ListMonitoredResourceDescriptors" - ].timeout, - client_info=self._client_info, - ) - - request = logging_pb2.ListMonitoredResourceDescriptorsRequest( - page_size=page_size, - ) - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_monitored_resource_descriptors"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="resource_descriptors", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def list_logs( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the logs in projects, organizations, folders, or billing accounts. - Only logs that have entries are listed. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.LoggingServiceV2Client() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_logs(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_logs(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The resource name that owns the logs: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`str` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_logs" not in self._inner_api_calls: - self._inner_api_calls[ - "list_logs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_logs, - default_retry=self._method_configs["ListLogs"].retry, - default_timeout=self._method_configs["ListLogs"].timeout, - client_info=self._client_info, - ) - - request = logging_pb2.ListLogsRequest(parent=parent, page_size=page_size,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_logs"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="log_names", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator diff --git a/logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py b/logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py deleted file mode 100644 index b3da612f6caf..000000000000 --- a/logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py +++ /dev/null @@ -1,62 +0,0 @@ -config = { - "interfaces": { - "google.logging.v2.LoggingServiceV2": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - }, - "list": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - }, - }, - "methods": { - "DeleteLog": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "WriteLogEntries": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - "bundling": { - "element_count_threshold": 1000, - "request_byte_threshold": 1048576, - "delay_threshold_millis": 50, - }, - }, - "ListLogEntries": { - "timeout_millis": 10000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - 
"ListMonitoredResourceDescriptors": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListLogs": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py deleted file mode 100644 index b127502ee3de..000000000000 --- a/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ /dev/null @@ -1,674 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.logging.v2 MetricsServiceV2 API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.api import monitored_resource_pb2 -from google.cloud.logging_v2.gapic import enums -from google.cloud.logging_v2.gapic import metrics_service_v2_client_config -from google.cloud.logging_v2.gapic.transports import metrics_service_v2_grpc_transport -from google.cloud.logging_v2.proto import log_entry_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2_grpc -from google.cloud.logging_v2.proto import logging_metrics_pb2 -from google.cloud.logging_v2.proto import logging_metrics_pb2_grpc -from google.cloud.logging_v2.proto import logging_pb2 -from google.cloud.logging_v2.proto import logging_pb2_grpc -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version - - -class MetricsServiceV2Client(object): - """Service for configuring logs-based metrics.""" - - SERVICE_ADDRESS = "logging.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.logging.v2.MetricsServiceV2" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricsServiceV2Client: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def billing_path(cls, billing_account): - """DEPRECATED. Return a fully-qualified billing string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account, - ) - - @classmethod - def folder_path(cls, folder): - """DEPRECATED. Return a fully-qualified folder string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand("folders/{folder}", folder=folder,) - - @classmethod - def metric_path(cls, project, metric): - """DEPRECATED. Return a fully-qualified metric string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "projects/{project}/metrics/{metric}", project=project, metric=metric, - ) - - @classmethod - def organization_path(cls, organization): - """DEPRECATED. Return a fully-qualified organization string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization, - ) - - @classmethod - def project_path(cls, project): - """DEPRECATED. Return a fully-qualified project string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.MetricsServiceV2GrpcTransport, - Callable[[~.Credentials, type], ~.MetricsServiceV2GrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = metrics_service_v2_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def list_log_metrics( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists logs-based metrics. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.MetricsServiceV2Client() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_log_metrics(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_log_metrics(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The name of the project containing the metrics: - - :: - - "projects/[PROJECT_ID]" - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. 
- retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.logging_v2.types.LogMetric` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_log_metrics" not in self._inner_api_calls: - self._inner_api_calls[ - "list_log_metrics" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_log_metrics, - default_retry=self._method_configs["ListLogMetrics"].retry, - default_timeout=self._method_configs["ListLogMetrics"].timeout, - client_info=self._client_info, - ) - - request = logging_metrics_pb2.ListLogMetricsRequest( - parent=parent, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_log_metrics"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="metrics", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_log_metric( - self, - metric_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a logs-based metric. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.MetricsServiceV2Client() - >>> - >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') - >>> - >>> response = client.get_log_metric(metric_name) - - Args: - metric_name (str): The resource name of the desired metric: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogMetric` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. 
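The metric read paths documented above follow the same pattern as the logging client; a short sketch, with placeholder project and metric IDs:

    from google.cloud import logging_v2

    client = logging_v2.MetricsServiceV2Client()
    parent = client.project_path("my-project-id")

    # List every logs-based metric in the project (paged under the hood).
    for metric in client.list_log_metrics(parent):
        print(metric.name, metric.filter)

    # Fetch a single metric by its resource name.
    metric_name = client.metric_path("my-project-id", "error_count")
    metric = client.get_log_metric(metric_name)
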
- """ - # Wrap the transport method to add retry and timeout logic. - if "get_log_metric" not in self._inner_api_calls: - self._inner_api_calls[ - "get_log_metric" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_log_metric, - default_retry=self._method_configs["GetLogMetric"].retry, - default_timeout=self._method_configs["GetLogMetric"].timeout, - client_info=self._client_info, - ) - - request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("metric_name", metric_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_log_metric"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_log_metric( - self, - parent, - metric, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a logs-based metric. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.MetricsServiceV2Client() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `metric`: - >>> metric = {} - >>> - >>> response = client.create_log_metric(parent, metric) - - Args: - parent (str): The resource name of the project in which to create the metric: - - :: - - "projects/[PROJECT_ID]" - - The new metric must be provided in the request. - metric (Union[dict, ~google.cloud.logging_v2.types.LogMetric]): The new logs-based metric, which must not have an identifier that - already exists. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.LogMetric` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogMetric` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_log_metric" not in self._inner_api_calls: - self._inner_api_calls[ - "create_log_metric" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_log_metric, - default_retry=self._method_configs["CreateLogMetric"].retry, - default_timeout=self._method_configs["CreateLogMetric"].timeout, - client_info=self._client_info, - ) - - request = logging_metrics_pb2.CreateLogMetricRequest( - parent=parent, metric=metric, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_log_metric"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_log_metric( - self, - metric_name, - metric, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates or updates a logs-based metric. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.MetricsServiceV2Client() - >>> - >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') - >>> - >>> # TODO: Initialize `metric`: - >>> metric = {} - >>> - >>> response = client.update_log_metric(metric_name, metric) - - Args: - metric_name (str): The resource name of the metric to update: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - - The updated metric must be provided in the request and it's ``name`` - field must be the same as ``[METRIC_ID]`` If the metric does not exist - in ``[PROJECT_ID]``, then a new metric is created. - metric (Union[dict, ~google.cloud.logging_v2.types.LogMetric]): The updated metric. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.LogMetric` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogMetric` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
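The write paths are symmetrical: the sketch below creates a logs-based metric and then updates its filter. The metric ID, description, and filters are placeholders, and the dict stands in for a LogMetric message as the docstrings allow.

    from google.cloud import logging_v2

    client = logging_v2.MetricsServiceV2Client()
    parent = client.project_path("my-project-id")

    # The metric's ``name`` is its short identifier, not the full resource name.
    metric = {
        "name": "error_count",
        "description": "Count of ERROR-or-worse entries",
        "filter": "severity>=ERROR",
    }
    created = client.create_log_metric(parent, metric)

    # update_log_metric upserts: it updates the metric, or creates it if absent.
    metric_name = client.metric_path("my-project-id", "error_count")
    metric["filter"] = "severity>=CRITICAL"
    updated = client.update_log_metric(metric_name, metric)
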
- if "update_log_metric" not in self._inner_api_calls: - self._inner_api_calls[ - "update_log_metric" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_log_metric, - default_retry=self._method_configs["UpdateLogMetric"].retry, - default_timeout=self._method_configs["UpdateLogMetric"].timeout, - client_info=self._client_info, - ) - - request = logging_metrics_pb2.UpdateLogMetricRequest( - metric_name=metric_name, metric=metric, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("metric_name", metric_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_log_metric"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_log_metric( - self, - metric_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a logs-based metric. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.MetricsServiceV2Client() - >>> - >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') - >>> - >>> client.delete_log_metric(metric_name) - - Args: - metric_name (str): The resource name of the metric to delete: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_log_metric" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_log_metric" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_log_metric, - default_retry=self._method_configs["DeleteLogMetric"].retry, - default_timeout=self._method_configs["DeleteLogMetric"].timeout, - client_info=self._client_info, - ) - - request = logging_metrics_pb2.DeleteLogMetricRequest(metric_name=metric_name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("metric_name", metric_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_log_metric"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py b/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py deleted file mode 100644 index 133abec23dcf..000000000000 --- a/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py +++ /dev/null @@ -1,48 +0,0 @@ -config = { - "interfaces": { - "google.logging.v2.MetricsServiceV2": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "ListLogMetrics": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetLogMetric": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateLogMetric": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateLogMetric": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteLogMetric": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/logging/google/cloud/logging_v2/gapic/transports/__init__.py b/logging/google/cloud/logging_v2/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py deleted file mode 100644 index b85abcd58a78..000000000000 --- a/logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ /dev/null @@ -1,257 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import google.api_core.grpc_helpers - -from google.cloud.logging_v2.proto import logging_config_pb2_grpc - - -class ConfigServiceV2GrpcTransport(object): - """gRPC transport class providing stubs for - google.logging.v2 ConfigServiceV2 API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ) - - def __init__( - self, channel=None, credentials=None, address="logging.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "config_service_v2_stub": logging_config_pb2_grpc.ConfigServiceV2Stub( - channel - ), - } - - @classmethod - def create_channel( - cls, address="logging.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def list_sinks(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.list_sinks`. - - Lists sinks. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].ListSinks - - @property - def get_sink(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.get_sink`. - - Gets a sink. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].GetSink - - @property - def create_sink(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.create_sink`. - - Creates a sink that exports specified log entries to a destination. The - export of newly-ingested log entries begins immediately, unless the - sink's ``writer_identity`` is not permitted to write to the destination. - A sink can export log entries only from the resource owning the sink. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].CreateSink - - @property - def update_sink(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.update_sink`. - - Updates a sink. This method replaces the following fields in the - existing sink with values from the new sink: ``destination``, and - ``filter``. - - The updated sink might also have a new ``writer_identity``; see the - ``unique_writer_identity`` field. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].UpdateSink - - @property - def delete_sink(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.delete_sink`. - - Deletes a sink. If the sink has a unique ``writer_identity``, then that - service account is also deleted. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].DeleteSink - - @property - def list_exclusions(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.list_exclusions`. - - Lists all the exclusions in a parent resource. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].ListExclusions - - @property - def get_exclusion(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.get_exclusion`. - - Gets the description of an exclusion. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].GetExclusion - - @property - def create_exclusion(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.create_exclusion`. - - Creates a new exclusion in a specified parent resource. - Only log entries belonging to that resource can be excluded. - You can have up to 10 exclusions in a resource. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].CreateExclusion - - @property - def update_exclusion(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.update_exclusion`. - - Changes one or more properties of an existing exclusion. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["config_service_v2_stub"].UpdateExclusion - - @property - def delete_exclusion(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.delete_exclusion`. - - Deletes an exclusion. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].DeleteExclusion diff --git a/logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py deleted file mode 100644 index f6ab3ab8876c..000000000000 --- a/logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ /dev/null @@ -1,192 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.logging_v2.proto import logging_pb2_grpc - - -class LoggingServiceV2GrpcTransport(object): - """gRPC transport class providing stubs for - google.logging.v2 LoggingServiceV2 API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ) - - def __init__( - self, channel=None, credentials=None, address="logging.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. 
- self._stubs = { - "logging_service_v2_stub": logging_pb2_grpc.LoggingServiceV2Stub(channel), - } - - @classmethod - def create_channel( - cls, address="logging.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def delete_log(self): - """Return the gRPC stub for :meth:`LoggingServiceV2Client.delete_log`. - - Deletes all the log entries in a log. - The log reappears if it receives new entries. - Log entries written shortly before the delete operation might not be - deleted. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["logging_service_v2_stub"].DeleteLog - - @property - def write_log_entries(self): - """Return the gRPC stub for :meth:`LoggingServiceV2Client.write_log_entries`. - - Writes log entries to Logging. This API method is the - only way to send log entries to Logging. This method - is used, directly or indirectly, by the Logging agent - (fluentd) and all logging libraries configured to use Logging. - A single request may contain log entries for a maximum of 1000 - different resources (projects, organizations, billing accounts or - folders) - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["logging_service_v2_stub"].WriteLogEntries - - @property - def list_log_entries(self): - """Return the gRPC stub for :meth:`LoggingServiceV2Client.list_log_entries`. - - Lists log entries. Use this method to retrieve log entries that - originated from a project/folder/organization/billing account. For ways - to export log entries, see `Exporting - Logs `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["logging_service_v2_stub"].ListLogEntries - - @property - def list_monitored_resource_descriptors(self): - """Return the gRPC stub for :meth:`LoggingServiceV2Client.list_monitored_resource_descriptors`. - - Lists the descriptors for monitored resource types used by Logging. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["logging_service_v2_stub"].ListMonitoredResourceDescriptors - - @property - def list_logs(self): - """Return the gRPC stub for :meth:`LoggingServiceV2Client.list_logs`. - - Lists the logs in projects, organizations, folders, or billing accounts. - Only logs that have entries are listed. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["logging_service_v2_stub"].ListLogs diff --git a/logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py b/logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py deleted file mode 100644 index bc66722729bb..000000000000 --- a/logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py +++ /dev/null @@ -1,181 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.logging_v2.proto import logging_metrics_pb2_grpc - - -class MetricsServiceV2GrpcTransport(object): - """gRPC transport class providing stubs for - google.logging.v2 MetricsServiceV2 API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ) - - def __init__( - self, channel=None, credentials=None, address="logging.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "metrics_service_v2_stub": logging_metrics_pb2_grpc.MetricsServiceV2Stub( - channel - ), - } - - @classmethod - def create_channel( - cls, address="logging.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. 
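A similar sketch for the LoggingServiceV2GrpcTransport just removed: the raw WriteLogEntries stub accepts a WriteLogEntriesRequest built from the generated logging_pb2 and log_entry_pb2 modules (both deleted later in this patch). The log name and payload are placeholders.

from google.cloud.logging_v2.gapic.transports import logging_service_v2_grpc_transport
from google.cloud.logging_v2.proto import log_entry_pb2, logging_pb2

transport = logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport()

# Build one entry; the "global" monitored resource is the simplest choice.
entry = log_entry_pb2.LogEntry(
    log_name="projects/my-project/logs/example-log",
    text_payload="hello from the raw transport",
)
entry.resource.type = "global"

request = logging_pb2.WriteLogEntriesRequest(entries=[entry])
transport.write_log_entries(request)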
- - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def list_log_metrics(self): - """Return the gRPC stub for :meth:`MetricsServiceV2Client.list_log_metrics`. - - Lists logs-based metrics. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metrics_service_v2_stub"].ListLogMetrics - - @property - def get_log_metric(self): - """Return the gRPC stub for :meth:`MetricsServiceV2Client.get_log_metric`. - - Gets a logs-based metric. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metrics_service_v2_stub"].GetLogMetric - - @property - def create_log_metric(self): - """Return the gRPC stub for :meth:`MetricsServiceV2Client.create_log_metric`. - - Creates a logs-based metric. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metrics_service_v2_stub"].CreateLogMetric - - @property - def update_log_metric(self): - """Return the gRPC stub for :meth:`MetricsServiceV2Client.update_log_metric`. - - Creates or updates a logs-based metric. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metrics_service_v2_stub"].UpdateLogMetric - - @property - def delete_log_metric(self): - """Return the gRPC stub for :meth:`MetricsServiceV2Client.delete_log_metric`. - - Deletes a logs-based metric. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metrics_service_v2_stub"].DeleteLogMetric diff --git a/logging/google/cloud/logging_v2/proto/__init__.py b/logging/google/cloud/logging_v2/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/logging/google/cloud/logging_v2/proto/log_entry.proto b/logging/google/cloud/logging_v2/proto/log_entry.proto deleted file mode 100644 index f0b037545199..000000000000 --- a/logging/google/cloud/logging_v2/proto/log_entry.proto +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
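All three transports above share the same channel handling: `channel` and `credentials` are mutually exclusive, and `create_channel` is the hook for supplying custom gRPC options. A sketch of injecting a pre-built channel into the metrics transport; the keepalive option and its value are purely illustrative.

from google.cloud.logging_v2.gapic.transports import metrics_service_v2_grpc_transport

Transport = metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport

# Build a channel with an explicit option, then hand it to the transport.
# Passing `credentials` alongside `channel` would raise ValueError instead.
channel = Transport.create_channel(
    address="logging.googleapis.com:443",
    options=[("grpc.keepalive_time_ms", 30000)],
)
transport = Transport(channel=channel)
assert transport.channel is channel  # the transport reuses the given channel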
-// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.logging.v2; - -import "google/api/monitored_resource.proto"; -import "google/logging/type/http_request.proto"; -import "google/logging/type/log_severity.proto"; -import "google/protobuf/any.proto"; -import "google/protobuf/struct.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/status.proto"; -import "google/api/annotations.proto"; - -option cc_enable_arenas = true; -option csharp_namespace = "Google.Cloud.Logging.V2"; -option go_package = "google.golang.org/genproto/googleapis/logging/v2;logging"; -option java_multiple_files = true; -option java_outer_classname = "LogEntryProto"; -option java_package = "com.google.logging.v2"; -option php_namespace = "Google\\Cloud\\Logging\\V2"; - -// An individual entry in a log. -// -message LogEntry { - // Required. The resource name of the log to which this log entry belongs: - // - // "projects/[PROJECT_ID]/logs/[LOG_ID]" - // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - // "folders/[FOLDER_ID]/logs/[LOG_ID]" - // - // A project number may optionally be used in place of PROJECT_ID. The project - // number is translated to its corresponding PROJECT_ID internally and the - // `log_name` field will contain PROJECT_ID in queries and exports. - // - // `[LOG_ID]` must be URL-encoded within `log_name`. Example: - // `"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"`. - // `[LOG_ID]` must be less than 512 characters long and can only include the - // following characters: upper and lower case alphanumeric characters, - // forward-slash, underscore, hyphen, and period. - // - // For backward compatibility, if `log_name` begins with a forward-slash, such - // as `/projects/...`, then the log entry is ingested as usual but the - // forward-slash is removed. Listing the log entry will not show the leading - // slash and filtering for a log name with a leading slash will never return - // any results. - string log_name = 12; - - // Required. The monitored resource that produced this log entry. - // - // Example: a log entry that reports a database error would be associated with - // the monitored resource designating the particular database that reported - // the error. - google.api.MonitoredResource resource = 8; - - // Optional. The log entry payload, which can be one of multiple types. - oneof payload { - // The log entry payload, represented as a protocol buffer. Some Google - // Cloud Platform services use this field for their log entry payloads. - // - // The following protocol buffer types are supported; user-defined types - // are not supported: - // - // "type.googleapis.com/google.cloud.audit.AuditLog" - // "type.googleapis.com/google.appengine.logging.v1.RequestLog" - google.protobuf.Any proto_payload = 2; - - // The log entry payload, represented as a Unicode string (UTF-8). - string text_payload = 3; - - // The log entry payload, represented as a structure that is - // expressed as a JSON object. - google.protobuf.Struct json_payload = 6; - } - - // Optional. The time the event described by the log entry occurred. This - // time is used to compute the log entry's age and to enforce the logs - // retention period. If this field is omitted in a new log entry, then Logging - // assigns it the current time. 
Timestamps have nanosecond accuracy, but - // trailing zeros in the fractional seconds might be omitted when the - // timestamp is displayed. - // - // Incoming log entries should have timestamps that are no more than the [logs - // retention period](/logging/quotas) in the past, and no more than 24 hours - // in the future. Log entries outside those time boundaries will not be - // available when calling `entries.list`, but those log entries can still be - // [exported with LogSinks](/logging/docs/api/tasks/exporting-logs). - google.protobuf.Timestamp timestamp = 9; - - // Output only. The time the log entry was received by Logging. - google.protobuf.Timestamp receive_timestamp = 24; - - // Optional. The severity of the log entry. The default value is - // `LogSeverity.DEFAULT`. - google.logging.type.LogSeverity severity = 10; - - // Optional. A unique identifier for the log entry. If you provide a value, - // then Logging considers other log entries in the same project, with the same - // `timestamp`, and with the same `insert_id` to be duplicates which can be - // removed. If omitted in new log entries, then Logging assigns its own unique - // identifier. The `insert_id` is also used to order log entries that have the - // same `timestamp` value. - string insert_id = 4; - - // Optional. Information about the HTTP request associated with this log - // entry, if applicable. - google.logging.type.HttpRequest http_request = 7; - - // Optional. A set of user-defined (key, value) data that provides additional - // information about the log entry. - map labels = 11; - - // Deprecated. Output only. Additional metadata about the monitored resource. - // - // Only `k8s_container`, `k8s_pod`, and `k8s_node` MonitoredResources have - // this field populated for GKE versions older than 1.12.6. For GKE versions - // 1.12.6 and above, the `metadata` field has been deprecated. The Kubernetes - // pod labels that used to be in `metadata.userLabels` will now be present in - // the `labels` field with a key prefix of `k8s-pod/`. The Stackdriver system - // labels that were present in the `metadata.systemLabels` field will no - // longer be available in the LogEntry. - google.api.MonitoredResourceMetadata metadata = 25 [deprecated = true]; - - // Optional. Information about an operation associated with the log entry, if - // applicable. - LogEntryOperation operation = 15; - - // Optional. Resource name of the trace associated with the log entry, if any. - // If it contains a relative resource name, the name is assumed to be relative - // to `//tracing.googleapis.com`. Example: - // `projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824` - string trace = 22; - - // Optional. The span ID within the trace associated with the log entry. - // - // For Trace spans, this is the same format that the Trace API v2 uses: a - // 16-character hexadecimal encoding of an 8-byte array, such as - // "000000000000004a". - string span_id = 27; - - // Optional. The sampling decision of the trace associated with the log entry. - // - // True means that the trace resource name in the `trace` field was sampled - // for storage in a trace backend. False means that the trace was not sampled - // for storage when this log entry was written, or the sampling decision was - // unknown at the time. A non-sampled `trace` value is still useful as a - // request correlation identifier. The default is False. - bool trace_sampled = 30; - - // Optional. Source code location information associated with the log entry, - // if any. 
- LogEntrySourceLocation source_location = 23; -} - -// Additional information about a potentially long-running operation with which -// a log entry is associated. -message LogEntryOperation { - // Optional. An arbitrary operation identifier. Log entries with the same - // identifier are assumed to be part of the same operation. - string id = 1; - - // Optional. An arbitrary producer identifier. The combination of `id` and - // `producer` must be globally unique. Examples for `producer`: - // `"MyDivision.MyBigCompany.com"`, `"github.com/MyProject/MyApplication"`. - string producer = 2; - - // Optional. Set this to True if this is the first log entry in the operation. - bool first = 3; - - // Optional. Set this to True if this is the last log entry in the operation. - bool last = 4; -} - -// Additional information about the source code location that produced the log -// entry. -message LogEntrySourceLocation { - // Optional. Source file name. Depending on the runtime environment, this - // might be a simple name or a fully-qualified name. - string file = 1; - - // Optional. Line within the source file. 1-based; 0 indicates no line number - // available. - int64 line = 2; - - // Optional. Human-readable name of the function or method being invoked, with - // optional context such as the class or package name. This information may be - // used in contexts such as the logs viewer, where a file and line number are - // less meaningful. The format can vary by language. For example: - // `qual.if.ied.Class.method` (Java), `dir/package.func` (Go), `function` - // (Python). - string function = 3; -} diff --git a/logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/logging/google/cloud/logging_v2/proto/log_entry_pb2.py deleted file mode 100644 index c2517d84adae..000000000000 --- a/logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ /dev/null @@ -1,873 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
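To make the `payload` oneof and the auxiliary messages defined above concrete, a short sketch using the generated log_entry_pb2 module (also deleted by this patch); all field values are placeholders.

from google.cloud.logging_v2.proto import log_entry_pb2

entry = log_entry_pb2.LogEntry(log_name="projects/my-project/logs/app")

# json_payload, text_payload and proto_payload share the `payload` oneof, so
# setting one of them clears the others.
entry.json_payload.update({"event": "signup", "user": "1234"})

# Group related entries under one long-running operation.
entry.operation.CopyFrom(
    log_entry_pb2.LogEntryOperation(
        id="op-42", producer="example.com/worker", first=True
    )
)

print(entry.WhichOneof("payload"))  # -> "json_payload"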
-# source: google/cloud/logging_v2/proto/log_entry.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.logging.type import ( - http_request_pb2 as google_dot_logging_dot_type_dot_http__request__pb2, -) -from google.logging.type import ( - log_severity_pb2 as google_dot_logging_dot_type_dot_log__severity__pb2, -) -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/logging_v2/proto/log_entry.proto", - package="google.logging.v2", - syntax="proto3", - serialized_options=_b( - "\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), - serialized_pb=_b( - '\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\x8e\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12;\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadataB\x02\x18\x01\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 
\x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_logging_dot_type_dot_http__request__pb2.DESCRIPTOR, - google_dot_logging_dot_type_dot_log__severity__pb2.DESCRIPTOR, - google_dot_protobuf_dot_any__pb2.DESCRIPTOR, - google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_LOGENTRY_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.logging.v2.LogEntry.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.LogEntry.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.LogEntry.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1057, - serialized_end=1102, -) - -_LOGENTRY = _descriptor.Descriptor( - name="LogEntry", - full_name="google.logging.v2.LogEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_name", - full_name="google.logging.v2.LogEntry.log_name", - index=0, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource", - full_name="google.logging.v2.LogEntry.resource", - index=1, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="proto_payload", - full_name="google.logging.v2.LogEntry.proto_payload", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="text_payload", - full_name="google.logging.v2.LogEntry.text_payload", - index=3, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="json_payload", - full_name="google.logging.v2.LogEntry.json_payload", - index=4, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="timestamp", - full_name="google.logging.v2.LogEntry.timestamp", - index=5, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="receive_timestamp", - full_name="google.logging.v2.LogEntry.receive_timestamp", - index=6, - number=24, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="severity", - full_name="google.logging.v2.LogEntry.severity", - index=7, - number=10, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="insert_id", - full_name="google.logging.v2.LogEntry.insert_id", - index=8, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="http_request", - full_name="google.logging.v2.LogEntry.http_request", - index=9, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.logging.v2.LogEntry.labels", - index=10, - number=11, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metadata", - full_name="google.logging.v2.LogEntry.metadata", - index=11, - number=25, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="operation", - full_name="google.logging.v2.LogEntry.operation", - index=12, - number=15, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="trace", - full_name="google.logging.v2.LogEntry.trace", - index=13, - 
number=22, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="span_id", - full_name="google.logging.v2.LogEntry.span_id", - index=14, - number=27, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="trace_sampled", - full_name="google.logging.v2.LogEntry.trace_sampled", - index=15, - number=30, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="source_location", - full_name="google.logging.v2.LogEntry.source_location", - index=16, - number=23, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_LOGENTRY_LABELSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="payload", - full_name="google.logging.v2.LogEntry.payload", - index=0, - containing_type=None, - fields=[], - ), - ], - serialized_start=331, - serialized_end=1113, -) - - -_LOGENTRYOPERATION = _descriptor.Descriptor( - name="LogEntryOperation", - full_name="google.logging.v2.LogEntryOperation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="id", - full_name="google.logging.v2.LogEntryOperation.id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="producer", - full_name="google.logging.v2.LogEntryOperation.producer", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="first", - full_name="google.logging.v2.LogEntryOperation.first", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="last", - full_name="google.logging.v2.LogEntryOperation.last", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - 
extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1115, - serialized_end=1193, -) - - -_LOGENTRYSOURCELOCATION = _descriptor.Descriptor( - name="LogEntrySourceLocation", - full_name="google.logging.v2.LogEntrySourceLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="file", - full_name="google.logging.v2.LogEntrySourceLocation.file", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="line", - full_name="google.logging.v2.LogEntrySourceLocation.line", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="function", - full_name="google.logging.v2.LogEntrySourceLocation.function", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1195, - serialized_end=1265, -) - -_LOGENTRY_LABELSENTRY.containing_type = _LOGENTRY -_LOGENTRY.fields_by_name[ - "resource" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE -_LOGENTRY.fields_by_name[ - "proto_payload" -].message_type = google_dot_protobuf_dot_any__pb2._ANY -_LOGENTRY.fields_by_name[ - "json_payload" -].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT -_LOGENTRY.fields_by_name[ - "timestamp" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGENTRY.fields_by_name[ - "receive_timestamp" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGENTRY.fields_by_name[ - "severity" -].enum_type = google_dot_logging_dot_type_dot_log__severity__pb2._LOGSEVERITY -_LOGENTRY.fields_by_name[ - "http_request" -].message_type = google_dot_logging_dot_type_dot_http__request__pb2._HTTPREQUEST -_LOGENTRY.fields_by_name["labels"].message_type = _LOGENTRY_LABELSENTRY -_LOGENTRY.fields_by_name[ - "metadata" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEMETADATA -_LOGENTRY.fields_by_name["operation"].message_type = _LOGENTRYOPERATION -_LOGENTRY.fields_by_name["source_location"].message_type = _LOGENTRYSOURCELOCATION -_LOGENTRY.oneofs_by_name["payload"].fields.append( - _LOGENTRY.fields_by_name["proto_payload"] -) -_LOGENTRY.fields_by_name["proto_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ - "payload" -] -_LOGENTRY.oneofs_by_name["payload"].fields.append( - _LOGENTRY.fields_by_name["text_payload"] -) -_LOGENTRY.fields_by_name["text_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ - "payload" -] -_LOGENTRY.oneofs_by_name["payload"].fields.append( - _LOGENTRY.fields_by_name["json_payload"] -) 
-_LOGENTRY.fields_by_name["json_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ - "payload" -] -DESCRIPTOR.message_types_by_name["LogEntry"] = _LOGENTRY -DESCRIPTOR.message_types_by_name["LogEntryOperation"] = _LOGENTRYOPERATION -DESCRIPTOR.message_types_by_name["LogEntrySourceLocation"] = _LOGENTRYSOURCELOCATION -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LogEntry = _reflection.GeneratedProtocolMessageType( - "LogEntry", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_LOGENTRY_LABELSENTRY, - __module__="google.cloud.logging_v2.proto.log_entry_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry.LabelsEntry) - ), - ), - DESCRIPTOR=_LOGENTRY, - __module__="google.cloud.logging_v2.proto.log_entry_pb2", - __doc__="""An individual entry in a log. - - - Attributes: - log_name: - Required. The resource name of the log to which this log entry - belongs: :: "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" A project number may - optionally be used in place of PROJECT\_ID. The project number - is translated to its corresponding PROJECT\_ID internally and - the ``log_name`` field will contain PROJECT\_ID in queries and - exports. ``[LOG_ID]`` must be URL-encoded within - ``log_name``. Example: ``"organizations/1234567890/logs/cloudr - esourcemanager.googleapis.com%2Factivity"``. ``[LOG_ID]`` must - be less than 512 characters long and can only include the - following characters: upper and lower case alphanumeric - characters, forward-slash, underscore, hyphen, and period. - For backward compatibility, if ``log_name`` begins with a - forward-slash, such as ``/projects/...``, then the log entry - is ingested as usual but the forward-slash is removed. Listing - the log entry will not show the leading slash and filtering - for a log name with a leading slash will never return any - results. - resource: - Required. The monitored resource that produced this log entry. - Example: a log entry that reports a database error would be - associated with the monitored resource designating the - particular database that reported the error. - payload: - Optional. The log entry payload, which can be one of multiple - types. - proto_payload: - The log entry payload, represented as a protocol buffer. Some - Google Cloud Platform services use this field for their log - entry payloads. The following protocol buffer types are - supported; user-defined types are not supported: - "type.googleapis.com/google.cloud.audit.AuditLog" - "type.googleapis.com/google.appengine.logging.v1.RequestLog" - text_payload: - The log entry payload, represented as a Unicode string - (UTF-8). - json_payload: - The log entry payload, represented as a structure that is - expressed as a JSON object. - timestamp: - Optional. The time the event described by the log entry - occurred. This time is used to compute the log entry's age and - to enforce the logs retention period. If this field is omitted - in a new log entry, then Logging assigns it the current time. - Timestamps have nanosecond accuracy, but trailing zeros in the - fractional seconds might be omitted when the timestamp is - displayed. Incoming log entries should have timestamps that - are no more than the `logs retention period - `__ in the past, and no more than 24 hours in - the future. 
Log entries outside those time boundaries will not - be available when calling ``entries.list``, but those log - entries can still be `exported with LogSinks - `__. - receive_timestamp: - Output only. The time the log entry was received by Logging. - severity: - Optional. The severity of the log entry. The default value is - ``LogSeverity.DEFAULT``. - insert_id: - Optional. A unique identifier for the log entry. If you - provide a value, then Logging considers other log entries in - the same project, with the same ``timestamp``, and with the - same ``insert_id`` to be duplicates which can be removed. If - omitted in new log entries, then Logging assigns its own - unique identifier. The ``insert_id`` is also used to order log - entries that have the same ``timestamp`` value. - http_request: - Optional. Information about the HTTP request associated with - this log entry, if applicable. - labels: - Optional. A set of user-defined (key, value) data that - provides additional information about the log entry. - metadata: - Deprecated. Output only. Additional metadata about the - monitored resource. Only ``k8s_container``, ``k8s_pod``, and - ``k8s_node`` MonitoredResources have this field populated for - GKE versions older than 1.12.6. For GKE versions 1.12.6 and - above, the ``metadata`` field has been deprecated. The - Kubernetes pod labels that used to be in - ``metadata.userLabels`` will now be present in the ``labels`` - field with a key prefix of ``k8s-pod/``. The Stackdriver - system labels that were present in the - ``metadata.systemLabels`` field will no longer be available in - the LogEntry. - operation: - Optional. Information about an operation associated with the - log entry, if applicable. - trace: - Optional. Resource name of the trace associated with the log - entry, if any. If it contains a relative resource name, the - name is assumed to be relative to - ``//tracing.googleapis.com``. Example: ``projects/my- - projectid/traces/06796866738c859f2f19b7cfb3214824`` - span_id: - Optional. The span ID within the trace associated with the log - entry. For Trace spans, this is the same format that the - Trace API v2 uses: a 16-character hexadecimal encoding of an - 8-byte array, such as "000000000000004a". - trace_sampled: - Optional. The sampling decision of the trace associated with - the log entry. True means that the trace resource name in the - ``trace`` field was sampled for storage in a trace backend. - False means that the trace was not sampled for storage when - this log entry was written, or the sampling decision was - unknown at the time. A non-sampled ``trace`` value is still - useful as a request correlation identifier. The default is - False. - source_location: - Optional. Source code location information associated with the - log entry, if any. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry) - ), -) -_sym_db.RegisterMessage(LogEntry) -_sym_db.RegisterMessage(LogEntry.LabelsEntry) - -LogEntryOperation = _reflection.GeneratedProtocolMessageType( - "LogEntryOperation", - (_message.Message,), - dict( - DESCRIPTOR=_LOGENTRYOPERATION, - __module__="google.cloud.logging_v2.proto.log_entry_pb2", - __doc__="""Additional information about a potentially long-running operation with - which a log entry is associated. - - - Attributes: - id: - Optional. An arbitrary operation identifier. Log entries with - the same identifier are assumed to be part of the same - operation. - producer: - Optional. An arbitrary producer identifier. 
The combination of - ``id`` and ``producer`` must be globally unique. Examples for - ``producer``: ``"MyDivision.MyBigCompany.com"``, - ``"github.com/MyProject/MyApplication"``. - first: - Optional. Set this to True if this is the first log entry in - the operation. - last: - Optional. Set this to True if this is the last log entry in - the operation. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntryOperation) - ), -) -_sym_db.RegisterMessage(LogEntryOperation) - -LogEntrySourceLocation = _reflection.GeneratedProtocolMessageType( - "LogEntrySourceLocation", - (_message.Message,), - dict( - DESCRIPTOR=_LOGENTRYSOURCELOCATION, - __module__="google.cloud.logging_v2.proto.log_entry_pb2", - __doc__="""Additional information about the source code location that produced the - log entry. - - - Attributes: - file: - Optional. Source file name. Depending on the runtime - environment, this might be a simple name or a fully-qualified - name. - line: - Optional. Line within the source file. 1-based; 0 indicates no - line number available. - function: - Optional. Human-readable name of the function or method being - invoked, with optional context such as the class or package - name. This information may be used in contexts such as the - logs viewer, where a file and line number are less meaningful. - The format can vary by language. For example: - ``qual.if.ied.Class.method`` (Java), ``dir/package.func`` - (Go), ``function`` (Python). - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntrySourceLocation) - ), -) -_sym_db.RegisterMessage(LogEntrySourceLocation) - - -DESCRIPTOR._options = None -_LOGENTRY_LABELSENTRY._options = None -_LOGENTRY.fields_by_name["metadata"]._options = None -# @@protoc_insertion_point(module_scope) diff --git a/logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py b/logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/logging/google/cloud/logging_v2/proto/logging.proto b/logging/google/cloud/logging_v2/proto/logging.proto deleted file mode 100644 index fc4217593770..000000000000 --- a/logging/google/cloud/logging_v2/proto/logging.proto +++ /dev/null @@ -1,357 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -syntax = "proto3"; - -package google.logging.v2; - -import "google/api/monitored_resource.proto"; -import "google/logging/v2/log_entry.proto"; -import "google/logging/v2/logging_config.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/status.proto"; -import "google/api/annotations.proto"; -import "google/api/client.proto"; - -option cc_enable_arenas = true; -option csharp_namespace = "Google.Cloud.Logging.V2"; -option go_package = "google.golang.org/genproto/googleapis/logging/v2;logging"; -option java_multiple_files = true; -option java_outer_classname = "LoggingProto"; -option java_package = "com.google.logging.v2"; -option php_namespace = "Google\\Cloud\\Logging\\V2"; - -// Service for ingesting and querying logs. -service LoggingServiceV2 { - option (google.api.default_host) = "logging.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/cloud-platform.read-only," - "https://www.googleapis.com/auth/logging.admin," - "https://www.googleapis.com/auth/logging.read," - "https://www.googleapis.com/auth/logging.write"; - - // Deletes all the log entries in a log. - // The log reappears if it receives new entries. - // Log entries written shortly before the delete operation might not be - // deleted. - rpc DeleteLog(DeleteLogRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{log_name=projects/*/logs/*}" - additional_bindings { - delete: "/v2/{log_name=*/*/logs/*}" - } - additional_bindings { - delete: "/v2/{log_name=organizations/*/logs/*}" - } - additional_bindings { - delete: "/v2/{log_name=folders/*/logs/*}" - } - additional_bindings { - delete: "/v2/{log_name=billingAccounts/*/logs/*}" - } - }; - } - - // Writes log entries to Logging. This API method is the - // only way to send log entries to Logging. This method - // is used, directly or indirectly, by the Logging agent - // (fluentd) and all logging libraries configured to use Logging. - // A single request may contain log entries for a maximum of 1000 - // different resources (projects, organizations, billing accounts or - // folders) - rpc WriteLogEntries(WriteLogEntriesRequest) returns (WriteLogEntriesResponse) { - option (google.api.http) = { - post: "/v2/entries:write" - body: "*" - }; - } - - // Lists log entries. Use this method to retrieve log entries that originated - // from a project/folder/organization/billing account. For ways to export log - // entries, see [Exporting Logs](/logging/docs/export). - rpc ListLogEntries(ListLogEntriesRequest) returns (ListLogEntriesResponse) { - option (google.api.http) = { - post: "/v2/entries:list" - body: "*" - }; - } - - // Lists the descriptors for monitored resource types used by Logging. - rpc ListMonitoredResourceDescriptors(ListMonitoredResourceDescriptorsRequest) returns (ListMonitoredResourceDescriptorsResponse) { - option (google.api.http) = { - get: "/v2/monitoredResourceDescriptors" - }; - } - - // Lists the logs in projects, organizations, folders, or billing accounts. - // Only logs that have entries are listed. 
- rpc ListLogs(ListLogsRequest) returns (ListLogsResponse) { - option (google.api.http) = { - get: "/v2/{parent=*/*}/logs" - additional_bindings { - get: "/v2/{parent=projects/*}/logs" - } - additional_bindings { - get: "/v2/{parent=organizations/*}/logs" - } - additional_bindings { - get: "/v2/{parent=folders/*}/logs" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*}/logs" - } - }; - } -} - -// The parameters to DeleteLog. -message DeleteLogRequest { - // Required. The resource name of the log to delete: - // - // "projects/[PROJECT_ID]/logs/[LOG_ID]" - // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - // "folders/[FOLDER_ID]/logs/[LOG_ID]" - // - // `[LOG_ID]` must be URL-encoded. For example, - // `"projects/my-project-id/logs/syslog"`, - // `"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"`. - // For more information about log names, see - // [LogEntry][google.logging.v2.LogEntry]. - string log_name = 1; -} - -// The parameters to WriteLogEntries. -message WriteLogEntriesRequest { - // Optional. A default log resource name that is assigned to all log entries - // in `entries` that do not specify a value for `log_name`: - // - // "projects/[PROJECT_ID]/logs/[LOG_ID]" - // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - // "folders/[FOLDER_ID]/logs/[LOG_ID]" - // - // `[LOG_ID]` must be URL-encoded. For example: - // - // "projects/my-project-id/logs/syslog" - // "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" - // - // The permission logging.logEntries.create is needed on each - // project, organization, billing account, or folder that is receiving - // new log entries, whether the resource is specified in - // logName or in an individual log entry. - string log_name = 1; - - // Optional. A default monitored resource object that is assigned to all log - // entries in `entries` that do not specify a value for `resource`. Example: - // - // { "type": "gce_instance", - // "labels": { - // "zone": "us-central1-a", "instance_id": "00000000000000000000" }} - // - // See [LogEntry][google.logging.v2.LogEntry]. - google.api.MonitoredResource resource = 2; - - // Optional. Default labels that are added to the `labels` field of all log - // entries in `entries`. If a log entry already has a label with the same key - // as a label in this parameter, then the log entry's label is not changed. - // See [LogEntry][google.logging.v2.LogEntry]. - map<string, string> labels = 3; - - // Required. The log entries to send to Logging. The order of log - // entries in this list does not matter. Values supplied in this method's - // `log_name`, `resource`, and `labels` fields are copied into those log - // entries in this list that do not include values for their corresponding - // fields. For more information, see the - // [LogEntry][google.logging.v2.LogEntry] type. - // - // If the `timestamp` or `insert_id` fields are missing in log entries, then - // this method supplies the current time or a unique identifier, respectively. - // The supplied values are chosen so that, among the log entries that did not - // supply their own values, the entries earlier in the list will sort before - // the entries later in the list. See the `entries.list` method.
- // - // Log entries with timestamps that are more than the - // [logs retention period](/logging/quota-policy) in the past or more than - // 24 hours in the future will not be available when calling `entries.list`. - // However, those log entries can still be - // [exported with LogSinks](/logging/docs/api/tasks/exporting-logs). - // - // To improve throughput and to avoid exceeding the - // [quota limit](/logging/quota-policy) for calls to `entries.write`, - // you should try to include several log entries in this list, - // rather than calling this method for each individual log entry. - repeated LogEntry entries = 4; - - // Optional. Whether valid entries should be written even if some other - // entries fail due to INVALID_ARGUMENT or PERMISSION_DENIED errors. If any - // entry is not written, then the response status is the error associated - // with one of the failed entries and the response includes error details - // keyed by the entries' zero-based index in the `entries.write` method. - bool partial_success = 5; - - // Optional. If true, the request should expect normal response, but the - // entries won't be persisted nor exported. Useful for checking whether the - // logging API endpoints are working properly before sending valuable data. - bool dry_run = 6; -} - -// Result returned from WriteLogEntries. -// empty -message WriteLogEntriesResponse { - -} - -// Error details for WriteLogEntries with partial success. -message WriteLogEntriesPartialErrors { - // When `WriteLogEntriesRequest.partial_success` is true, records the error - // status for entries that were not written due to a permanent error, keyed - // by the entry's zero-based index in `WriteLogEntriesRequest.entries`. - // - // Failed requests for which no entries are written will not include - // per-entry errors. - map<int32, google.rpc.Status> log_entry_errors = 1; -} - -// The parameters to `ListLogEntries`. -message ListLogEntriesRequest { - // Deprecated. Use `resource_names` instead. One or more project identifiers - // or project numbers from which to retrieve log entries. Example: - // `"my-project-1A"`. - repeated string project_ids = 1 [deprecated = true]; - - // Required. Names of one or more parent resources from which to - // retrieve log entries: - // - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - // - // - // Projects listed in the `project_ids` field are added to this list. - repeated string resource_names = 8; - - // Optional. A filter that chooses which log entries to return. See [Advanced - // Logs Filters](/logging/docs/view/advanced_filters). Only log entries that - // match the filter are returned. An empty filter matches all log entries in - // the resources listed in `resource_names`. Referencing a parent resource - // that is not listed in `resource_names` will cause the filter to return no - // results. - // The maximum length of the filter is 20000 characters. - string filter = 2; - - // Optional. How the results should be sorted. Presently, the only permitted - // values are `"timestamp asc"` (default) and `"timestamp desc"`. The first - // option returns entries in order of increasing values of - // `LogEntry.timestamp` (oldest first), and the second option returns entries - // in order of decreasing timestamps (newest first). Entries with equal - // timestamps are returned in order of their `insert_id` values. - string order_by = 3; - - // Optional. The maximum number of results to return from this request.
- // Non-positive values are ignored. The presence of `next_page_token` in the - // response indicates that more results might be available. - int32 page_size = 4; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `page_token` must be the value of - // `next_page_token` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 5; -} - -// Result returned from `ListLogEntries`. -message ListLogEntriesResponse { - // A list of log entries. If `entries` is empty, `nextPageToken` may still be - // returned, indicating that more entries may exist. See `nextPageToken` for - // more information. - repeated LogEntry entries = 1; - - // If there might be more results than those appearing in this response, then - // `nextPageToken` is included. To get the next set of results, call this - // method again using the value of `nextPageToken` as `pageToken`. - // - // If a value for `next_page_token` appears and the `entries` field is empty, - // it means that the search found no log entries so far but it did not have - // time to search all the possible log entries. Retry the method with this - // value for `page_token` to continue the search. Alternatively, consider - // speeding up the search by changing your filter to specify a single log name - // or resource type, or to narrow the time range of the search. - string next_page_token = 2; -} - -// The parameters to ListMonitoredResourceDescriptors -message ListMonitoredResourceDescriptorsRequest { - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 1; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 2; -} - -// Result returned from ListMonitoredResourceDescriptors. -message ListMonitoredResourceDescriptorsResponse { - // A list of resource descriptors. - repeated google.api.MonitoredResourceDescriptor resource_descriptors = 1; - - // If there might be more results than those appearing in this response, then - // `nextPageToken` is included. To get the next set of results, call this - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to ListLogs. -message ListLogsRequest { - // Required. The resource name that owns the logs: - // - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - string parent = 1; - - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 2; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 3; -} - -// Result returned from ListLogs. 
-message ListLogsResponse { - // A list of log names. For example, - // `"projects/my-project/logs/syslog"` or - // `"organizations/123/logs/cloudresourcemanager.googleapis.com%2Factivity"`. - repeated string log_names = 3; - - // If there might be more results than those appearing in this response, then - // `nextPageToken` is included. To get the next set of results, call this - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} diff --git a/logging/google/cloud/logging_v2/proto/logging_config.proto b/logging/google/cloud/logging_v2/proto/logging_config.proto deleted file mode 100644 index a9ccdf51cb19..000000000000 --- a/logging/google/cloud/logging_v2/proto/logging_config.proto +++ /dev/null @@ -1,676 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.logging.v2; - -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/timestamp.proto"; -import "google/api/annotations.proto"; -import "google/api/client.proto"; - -option cc_enable_arenas = true; -option csharp_namespace = "Google.Cloud.Logging.V2"; -option go_package = "google.golang.org/genproto/googleapis/logging/v2;logging"; -option java_multiple_files = true; -option java_outer_classname = "LoggingConfigProto"; -option java_package = "com.google.logging.v2"; -option php_namespace = "Google\\Cloud\\Logging\\V2"; - -// Service for configuring sinks used to route log entries. -service ConfigServiceV2 { - option (google.api.default_host) = "logging.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/cloud-platform.read-only," - "https://www.googleapis.com/auth/logging.admin," - "https://www.googleapis.com/auth/logging.read"; - - // Lists sinks. - rpc ListSinks(ListSinksRequest) returns (ListSinksResponse) { - option (google.api.http) = { - get: "/v2/{parent=*/*}/sinks" - additional_bindings { - get: "/v2/{parent=projects/*}/sinks" - } - additional_bindings { - get: "/v2/{parent=organizations/*}/sinks" - } - additional_bindings { - get: "/v2/{parent=folders/*}/sinks" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*}/sinks" - } - }; - } - - // Gets a sink. - rpc GetSink(GetSinkRequest) returns (LogSink) { - option (google.api.http) = { - get: "/v2/{sink_name=*/*/sinks/*}" - additional_bindings { - get: "/v2/{sink_name=projects/*/sinks/*}" - } - additional_bindings { - get: "/v2/{sink_name=organizations/*/sinks/*}" - } - additional_bindings { - get: "/v2/{sink_name=folders/*/sinks/*}" - } - additional_bindings { - get: "/v2/{sink_name=billingAccounts/*/sinks/*}" - } - }; - } - - // Creates a sink that exports specified log entries to a destination. The - // export of newly-ingested log entries begins immediately, unless the sink's - // `writer_identity` is not permitted to write to the destination. 
A sink can - // export log entries only from the resource owning the sink. - rpc CreateSink(CreateSinkRequest) returns (LogSink) { - option (google.api.http) = { - post: "/v2/{parent=*/*}/sinks" - body: "sink" - additional_bindings { - post: "/v2/{parent=projects/*}/sinks" - body: "sink" - } - additional_bindings { - post: "/v2/{parent=organizations/*}/sinks" - body: "sink" - } - additional_bindings { - post: "/v2/{parent=folders/*}/sinks" - body: "sink" - } - additional_bindings { - post: "/v2/{parent=billingAccounts/*}/sinks" - body: "sink" - } - }; - } - - // Updates a sink. This method replaces the following fields in the existing - // sink with values from the new sink: `destination`, and `filter`. - // - // The updated sink might also have a new `writer_identity`; see the - // `unique_writer_identity` field. - rpc UpdateSink(UpdateSinkRequest) returns (LogSink) { - option (google.api.http) = { - put: "/v2/{sink_name=*/*/sinks/*}" - body: "sink" - additional_bindings { - put: "/v2/{sink_name=projects/*/sinks/*}" - body: "sink" - } - additional_bindings { - put: "/v2/{sink_name=organizations/*/sinks/*}" - body: "sink" - } - additional_bindings { - put: "/v2/{sink_name=folders/*/sinks/*}" - body: "sink" - } - additional_bindings { - put: "/v2/{sink_name=billingAccounts/*/sinks/*}" - body: "sink" - } - additional_bindings { - patch: "/v2/{sink_name=projects/*/sinks/*}" - body: "sink" - } - additional_bindings { - patch: "/v2/{sink_name=organizations/*/sinks/*}" - body: "sink" - } - additional_bindings { - patch: "/v2/{sink_name=folders/*/sinks/*}" - body: "sink" - } - additional_bindings { - patch: "/v2/{sink_name=billingAccounts/*/sinks/*}" - body: "sink" - } - }; - } - - // Deletes a sink. If the sink has a unique `writer_identity`, then that - // service account is also deleted. - rpc DeleteSink(DeleteSinkRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{sink_name=*/*/sinks/*}" - additional_bindings { - delete: "/v2/{sink_name=projects/*/sinks/*}" - } - additional_bindings { - delete: "/v2/{sink_name=organizations/*/sinks/*}" - } - additional_bindings { - delete: "/v2/{sink_name=folders/*/sinks/*}" - } - additional_bindings { - delete: "/v2/{sink_name=billingAccounts/*/sinks/*}" - } - }; - } - - // Lists all the exclusions in a parent resource. - rpc ListExclusions(ListExclusionsRequest) returns (ListExclusionsResponse) { - option (google.api.http) = { - get: "/v2/{parent=*/*}/exclusions" - additional_bindings { - get: "/v2/{parent=projects/*}/exclusions" - } - additional_bindings { - get: "/v2/{parent=organizations/*}/exclusions" - } - additional_bindings { - get: "/v2/{parent=folders/*}/exclusions" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*}/exclusions" - } - }; - } - - // Gets the description of an exclusion. - rpc GetExclusion(GetExclusionRequest) returns (LogExclusion) { - option (google.api.http) = { - get: "/v2/{name=*/*/exclusions/*}" - additional_bindings { - get: "/v2/{name=projects/*/exclusions/*}" - } - additional_bindings { - get: "/v2/{name=organizations/*/exclusions/*}" - } - additional_bindings { - get: "/v2/{name=folders/*/exclusions/*}" - } - additional_bindings { - get: "/v2/{name=billingAccounts/*/exclusions/*}" - } - }; - } - - // Creates a new exclusion in a specified parent resource. - // Only log entries belonging to that resource can be excluded. - // You can have up to 10 exclusions in a resource. 
- rpc CreateExclusion(CreateExclusionRequest) returns (LogExclusion) { - option (google.api.http) = { - post: "/v2/{parent=*/*}/exclusions" - body: "exclusion" - additional_bindings { - post: "/v2/{parent=projects/*}/exclusions" - body: "exclusion" - } - additional_bindings { - post: "/v2/{parent=organizations/*}/exclusions" - body: "exclusion" - } - additional_bindings { - post: "/v2/{parent=folders/*}/exclusions" - body: "exclusion" - } - additional_bindings { - post: "/v2/{parent=billingAccounts/*}/exclusions" - body: "exclusion" - } - }; - } - - // Changes one or more properties of an existing exclusion. - rpc UpdateExclusion(UpdateExclusionRequest) returns (LogExclusion) { - option (google.api.http) = { - patch: "/v2/{name=*/*/exclusions/*}" - body: "exclusion" - additional_bindings { - patch: "/v2/{name=projects/*/exclusions/*}" - body: "exclusion" - } - additional_bindings { - patch: "/v2/{name=organizations/*/exclusions/*}" - body: "exclusion" - } - additional_bindings { - patch: "/v2/{name=folders/*/exclusions/*}" - body: "exclusion" - } - additional_bindings { - patch: "/v2/{name=billingAccounts/*/exclusions/*}" - body: "exclusion" - } - }; - } - - // Deletes an exclusion. - rpc DeleteExclusion(DeleteExclusionRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{name=*/*/exclusions/*}" - additional_bindings { - delete: "/v2/{name=projects/*/exclusions/*}" - } - additional_bindings { - delete: "/v2/{name=organizations/*/exclusions/*}" - } - additional_bindings { - delete: "/v2/{name=folders/*/exclusions/*}" - } - additional_bindings { - delete: "/v2/{name=billingAccounts/*/exclusions/*}" - } - }; - } -} - -// Describes a sink used to export log entries to one of the following -// destinations in any project: a Cloud Storage bucket, a BigQuery dataset, or a -// Cloud Pub/Sub topic. A logs filter controls which log entries are exported. -// The sink must be created within a project, organization, billing account, or -// folder. -message LogSink { - // Available log entry formats. Log entries can be written to - // Logging in either format and can be exported in either format. - // Version 2 is the preferred format. - enum VersionFormat { - // An unspecified format version that will default to V2. - VERSION_FORMAT_UNSPECIFIED = 0; - - // `LogEntry` version 2 format. - V2 = 1; - - // `LogEntry` version 1 format. - V1 = 2; - } - - // Required. The client-assigned sink identifier, unique within the - // project. Example: `"my-syslog-errors-to-pubsub"`. Sink identifiers are - // limited to 100 characters and can include only the following characters: - // upper and lower-case alphanumeric characters, underscores, hyphens, and - // periods. - string name = 1; - - // Required. The export destination: - // - // "storage.googleapis.com/[GCS_BUCKET]" - // "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]" - // "pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" - // - // The sink's `writer_identity`, set when the sink is created, must - // have permission to write to the destination or else the log - // entries are not exported. For more information, see - // [Exporting Logs with Sinks](/logging/docs/api/tasks/exporting-logs). - string destination = 3; - - // Optional. An [advanced logs filter](/logging/docs/view/advanced-queries). The only - // exported log entries are those that are in the resource owning the sink and - // that match the filter. 
For example: - // - // logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR - string filter = 5; - - // Deprecated. The log entry format to use for this sink's exported log - // entries. The v2 format is used by default and cannot be changed. - VersionFormat output_version_format = 6 [deprecated = true]; - - // Output only. An IAM identity—a service account or group—under - // which Logging writes the exported log entries to the sink's destination. - // This field is set by - // [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - // and - // [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] - // based on the value of `unique_writer_identity` in those methods. - // - // Until you grant this identity write-access to the destination, log entry - // exports from this sink will fail. For more information, - // see [Granting Access for a - // Resource](/iam/docs/granting-roles-to-service-accounts#granting_access_to_a_service_account_for_a_resource). - // Consult the destination service's documentation to determine the - // appropriate IAM roles to assign to the identity. - string writer_identity = 8; - - // Optional. This field applies only to sinks owned by organizations and - // folders. If the field is false, the default, only the logs owned by the - // sink's parent resource are available for export. If the field is true, then - // logs from all the projects, folders, and billing accounts contained in the - // sink's parent resource are also available for export. Whether a particular - // log entry from the children is exported depends on the sink's filter - // expression. For example, if this field is true, then the filter - // `resource.type=gce_instance` would export all Compute Engine VM instance - // log entries from all projects in the sink's parent. To only export entries - // from certain child projects, filter on the project part of the log name: - // - // logName:("projects/test-project1/" OR "projects/test-project2/") AND - // resource.type=gce_instance - bool include_children = 9; - - // Optional. Destination dependent options. - oneof options { - // Optional. Options that affect sinks exporting data to BigQuery. - BigQueryOptions bigquery_options = 12; - } - - // Output only. The creation timestamp of the sink. - // - // This field may not be present for older sinks. - google.protobuf.Timestamp create_time = 13; - - // Output only. The last update timestamp of the sink. - // - // This field may not be present for older sinks. - google.protobuf.Timestamp update_time = 14; - - // Do not use. This field is ignored. - google.protobuf.Timestamp start_time = 10 [deprecated = true]; - - // Do not use. This field is ignored. - google.protobuf.Timestamp end_time = 11 [deprecated = true]; -} - -// Options that change functionality of a sink exporting data to BigQuery. -message BigQueryOptions { - // Optional. Whether to use [BigQuery's partition - // tables](/bigquery/docs/partitioned-tables). By default, Logging - // creates dated tables based on the log entries' timestamps, e.g. - // syslog_20170523. With partitioned tables the date suffix is no longer - // present and [special query - // syntax](/bigquery/docs/querying-partitioned-tables) has to be used instead. - // In both cases, tables are sharded based on UTC timezone. - bool use_partitioned_tables = 1; -} - -// The parameters to `ListSinks`. -message ListSinksRequest { - // Required. 
The parent resource whose sinks are to be listed: - // - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - string parent = 1; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 2; - - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 3; -} - -// Result returned from `ListSinks`. -message ListSinksResponse { - // A list of sinks. - repeated LogSink sinks = 1; - - // If there might be more results than appear in this response, then - // `nextPageToken` is included. To get the next set of results, call the same - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to `GetSink`. -message GetSinkRequest { - // Required. The resource name of the sink: - // - // "projects/[PROJECT_ID]/sinks/[SINK_ID]" - // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - // "folders/[FOLDER_ID]/sinks/[SINK_ID]" - // - // Example: `"projects/my-project-id/sinks/my-sink-id"`. - string sink_name = 1; -} - -// The parameters to `CreateSink`. -message CreateSinkRequest { - // Required. The resource in which to create the sink: - // - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - // - // Examples: `"projects/my-logging-project"`, `"organizations/123456789"`. - string parent = 1; - - // Required. The new sink, whose `name` parameter is a sink identifier that - // is not already in use. - LogSink sink = 2; - - // Optional. Determines the kind of IAM identity returned as `writer_identity` - // in the new sink. If this value is omitted or set to false, and if the - // sink's parent is a project, then the value returned as `writer_identity` is - // the same group or service account used by Logging before the addition of - // writer identities to this API. The sink's destination must be in the same - // project as the sink itself. - // - // If this field is set to true, or if the sink is owned by a non-project - // resource such as an organization, then the value of `writer_identity` will - // be a unique service account used only for exports from the new sink. For - // more information, see `writer_identity` in [LogSink][google.logging.v2.LogSink]. - bool unique_writer_identity = 3; -} - -// The parameters to `UpdateSink`. -message UpdateSinkRequest { - // Required. The full resource name of the sink to update, including the - // parent resource and the sink identifier: - // - // "projects/[PROJECT_ID]/sinks/[SINK_ID]" - // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - // "folders/[FOLDER_ID]/sinks/[SINK_ID]" - // - // Example: `"projects/my-project-id/sinks/my-sink-id"`. - string sink_name = 1; - - // Required. The updated sink, whose name is the same identifier that appears - // as part of `sink_name`. - LogSink sink = 2; - - // Optional. 
See [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - // for a description of this field. When updating a sink, the effect of this - // field on the value of `writer_identity` in the updated sink depends on both - // the old and new values of this field: - // - // + If the old and new values of this field are both false or both true, - // then there is no change to the sink's `writer_identity`. - // + If the old value is false and the new value is true, then - // `writer_identity` is changed to a unique service account. - // + It is an error if the old value is true and the new value is - // set to false or defaulted to false. - bool unique_writer_identity = 3; - - // Optional. Field mask that specifies the fields in `sink` that need - // an update. A sink field will be overwritten if, and only if, it is - // in the update mask. `name` and output only fields cannot be updated. - // - // An empty updateMask is temporarily treated as using the following mask - // for backwards compatibility purposes: - // destination,filter,includeChildren - // At some point in the future, behavior will be removed and specifying an - // empty updateMask will be an error. - // - // For a detailed `FieldMask` definition, see - // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - // - // Example: `updateMask=filter`. - google.protobuf.FieldMask update_mask = 4; -} - -// The parameters to `DeleteSink`. -message DeleteSinkRequest { - // Required. The full resource name of the sink to delete, including the - // parent resource and the sink identifier: - // - // "projects/[PROJECT_ID]/sinks/[SINK_ID]" - // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - // "folders/[FOLDER_ID]/sinks/[SINK_ID]" - // - // Example: `"projects/my-project-id/sinks/my-sink-id"`. - string sink_name = 1; -} - -// Specifies a set of log entries that are not to be stored in -// Logging. If your GCP resource receives a large volume of logs, you can -// use exclusions to reduce your chargeable logs. Exclusions are -// processed after log sinks, so you can export log entries before they are -// excluded. Note that organization-level and folder-level exclusions don't -// apply to child resources, and that you can't exclude audit log entries. -message LogExclusion { - // Required. A client-assigned identifier, such as - // `"load-balancer-exclusion"`. Identifiers are limited to 100 characters and - // can include only letters, digits, underscores, hyphens, and periods. - string name = 1; - - // Optional. A description of this exclusion. - string description = 2; - - // Required. An [advanced logs filter](/logging/docs/view/advanced-queries) - // that matches the log entries to be excluded. By using the - // [sample function](/logging/docs/view/advanced-queries#sample), - // you can exclude less than 100% of the matching log entries. 
- // For example, the following query matches 99% of low-severity log - // entries from Google Cloud Storage buckets: - // - // `"resource.type=gcs_bucket severity\n\x10\x62igquery_options\x18\x0c \x01(\x0b\x32".google.logging.v2.BigQueryOptionsH\x00\x12/\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x0e \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\x42\t\n\x07options"1\n\x0f\x42igQueryOptions\x12\x1e\n\x16use_partitioned_tables\x18\x01 \x01(\x08"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"\xb5\x01\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t2\xf6\x1a\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}
:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}\x1a\xdf\x01\xca\x41\x16logging.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.readB\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_LOGSINK_VERSIONFORMAT = _descriptor.EnumDescriptor( - name="VersionFormat", - full_name="google.logging.v2.LogSink.VersionFormat", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="VERSION_FORMAT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="V2", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="V1", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=711, - serialized_end=774, -) -_sym_db.RegisterEnumDescriptor(_LOGSINK_VERSIONFORMAT) - - -_LOGSINK = _descriptor.Descriptor( - name="LogSink", - full_name="google.logging.v2.LogSink", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.LogSink.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="destination", - full_name="google.logging.v2.LogSink.destination", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.LogSink.filter", - index=2, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="output_version_format", - full_name="google.logging.v2.LogSink.output_version_format", - index=3, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="writer_identity", - full_name="google.logging.v2.LogSink.writer_identity", - index=4, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="include_children", - full_name="google.logging.v2.LogSink.include_children", - index=5, - number=9, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bigquery_options", - full_name="google.logging.v2.LogSink.bigquery_options", - index=6, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.logging.v2.LogSink.create_time", - index=7, - number=13, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.logging.v2.LogSink.update_time", - index=8, - number=14, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.logging.v2.LogSink.start_time", - index=9, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.logging.v2.LogSink.end_time", - index=10, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_LOGSINK_VERSIONFORMAT,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="options", - full_name="google.logging.v2.LogSink.options", - index=0, - containing_type=None, - fields=[], - ), - ], - serialized_start=257, - serialized_end=785, -) - - -_BIGQUERYOPTIONS = _descriptor.Descriptor( - name="BigQueryOptions", - full_name="google.logging.v2.BigQueryOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="use_partitioned_tables", - full_name="google.logging.v2.BigQueryOptions.use_partitioned_tables", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - 
default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=787, - serialized_end=836, -) - - -_LISTSINKSREQUEST = _descriptor.Descriptor( - name="ListSinksRequest", - full_name="google.logging.v2.ListSinksRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListSinksRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListSinksRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListSinksRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=838, - serialized_end=911, -) - - -_LISTSINKSRESPONSE = _descriptor.Descriptor( - name="ListSinksResponse", - full_name="google.logging.v2.ListSinksResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sinks", - full_name="google.logging.v2.ListSinksResponse.sinks", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListSinksResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=913, - serialized_end=1000, -) - - -_GETSINKREQUEST = _descriptor.Descriptor( - name="GetSinkRequest", - full_name="google.logging.v2.GetSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sink_name", - full_name="google.logging.v2.GetSinkRequest.sink_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1002, - serialized_end=1037, -) - - -_CREATESINKREQUEST = _descriptor.Descriptor( - name="CreateSinkRequest", - full_name="google.logging.v2.CreateSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.CreateSinkRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="sink", - full_name="google.logging.v2.CreateSinkRequest.sink", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="unique_writer_identity", - full_name="google.logging.v2.CreateSinkRequest.unique_writer_identity", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1039, - serialized_end=1148, -) - - -_UPDATESINKREQUEST = _descriptor.Descriptor( - name="UpdateSinkRequest", - full_name="google.logging.v2.UpdateSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sink_name", - full_name="google.logging.v2.UpdateSinkRequest.sink_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="sink", - full_name="google.logging.v2.UpdateSinkRequest.sink", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="unique_writer_identity", - full_name="google.logging.v2.UpdateSinkRequest.unique_writer_identity", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.logging.v2.UpdateSinkRequest.update_mask", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - 
has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1151, - serialized_end=1312, -) - - -_DELETESINKREQUEST = _descriptor.Descriptor( - name="DeleteSinkRequest", - full_name="google.logging.v2.DeleteSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sink_name", - full_name="google.logging.v2.DeleteSinkRequest.sink_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1314, - serialized_end=1352, -) - - -_LOGEXCLUSION = _descriptor.Descriptor( - name="LogExclusion", - full_name="google.logging.v2.LogExclusion", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.LogExclusion.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.logging.v2.LogExclusion.description", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.LogExclusion.filter", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="disabled", - full_name="google.logging.v2.LogExclusion.disabled", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.logging.v2.LogExclusion.create_time", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.logging.v2.LogExclusion.update_time", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, 
- message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1355, - serialized_end=1536, -) - - -_LISTEXCLUSIONSREQUEST = _descriptor.Descriptor( - name="ListExclusionsRequest", - full_name="google.logging.v2.ListExclusionsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListExclusionsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListExclusionsRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListExclusionsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1538, - serialized_end=1616, -) - - -_LISTEXCLUSIONSRESPONSE = _descriptor.Descriptor( - name="ListExclusionsResponse", - full_name="google.logging.v2.ListExclusionsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="exclusions", - full_name="google.logging.v2.ListExclusionsResponse.exclusions", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListExclusionsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1618, - serialized_end=1720, -) - - -_GETEXCLUSIONREQUEST = _descriptor.Descriptor( - name="GetExclusionRequest", - full_name="google.logging.v2.GetExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.GetExclusionRequest.name", - index=0, - number=1, - type=9, - 
cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1722, - serialized_end=1757, -) - - -_CREATEEXCLUSIONREQUEST = _descriptor.Descriptor( - name="CreateExclusionRequest", - full_name="google.logging.v2.CreateExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.CreateExclusionRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="exclusion", - full_name="google.logging.v2.CreateExclusionRequest.exclusion", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1759, - serialized_end=1851, -) - - -_UPDATEEXCLUSIONREQUEST = _descriptor.Descriptor( - name="UpdateExclusionRequest", - full_name="google.logging.v2.UpdateExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.UpdateExclusionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="exclusion", - full_name="google.logging.v2.UpdateExclusionRequest.exclusion", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.logging.v2.UpdateExclusionRequest.update_mask", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1854, - serialized_end=1993, -) - - -_DELETEEXCLUSIONREQUEST = _descriptor.Descriptor( - name="DeleteExclusionRequest", - full_name="google.logging.v2.DeleteExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - 
full_name="google.logging.v2.DeleteExclusionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1995, - serialized_end=2033, -) - -_LOGSINK.fields_by_name["output_version_format"].enum_type = _LOGSINK_VERSIONFORMAT -_LOGSINK.fields_by_name["bigquery_options"].message_type = _BIGQUERYOPTIONS -_LOGSINK.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK_VERSIONFORMAT.containing_type = _LOGSINK -_LOGSINK.oneofs_by_name["options"].fields.append( - _LOGSINK.fields_by_name["bigquery_options"] -) -_LOGSINK.fields_by_name["bigquery_options"].containing_oneof = _LOGSINK.oneofs_by_name[ - "options" -] -_LISTSINKSRESPONSE.fields_by_name["sinks"].message_type = _LOGSINK -_CREATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK -_UPDATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK -_UPDATESINKREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LOGEXCLUSION.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGEXCLUSION.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LISTEXCLUSIONSRESPONSE.fields_by_name["exclusions"].message_type = _LOGEXCLUSION -_CREATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION -_UPDATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION -_UPDATEEXCLUSIONREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -DESCRIPTOR.message_types_by_name["LogSink"] = _LOGSINK -DESCRIPTOR.message_types_by_name["BigQueryOptions"] = _BIGQUERYOPTIONS -DESCRIPTOR.message_types_by_name["ListSinksRequest"] = _LISTSINKSREQUEST -DESCRIPTOR.message_types_by_name["ListSinksResponse"] = _LISTSINKSRESPONSE -DESCRIPTOR.message_types_by_name["GetSinkRequest"] = _GETSINKREQUEST -DESCRIPTOR.message_types_by_name["CreateSinkRequest"] = _CREATESINKREQUEST -DESCRIPTOR.message_types_by_name["UpdateSinkRequest"] = _UPDATESINKREQUEST -DESCRIPTOR.message_types_by_name["DeleteSinkRequest"] = _DELETESINKREQUEST -DESCRIPTOR.message_types_by_name["LogExclusion"] = _LOGEXCLUSION -DESCRIPTOR.message_types_by_name["ListExclusionsRequest"] = _LISTEXCLUSIONSREQUEST -DESCRIPTOR.message_types_by_name["ListExclusionsResponse"] = _LISTEXCLUSIONSRESPONSE -DESCRIPTOR.message_types_by_name["GetExclusionRequest"] = _GETEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name["CreateExclusionRequest"] = _CREATEEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name["UpdateExclusionRequest"] = _UPDATEEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name["DeleteExclusionRequest"] = _DELETEEXCLUSIONREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LogSink = 
_reflection.GeneratedProtocolMessageType( - "LogSink", - (_message.Message,), - dict( - DESCRIPTOR=_LOGSINK, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Describes a sink used to export log entries to one of the following - destinations in any project: a Cloud Storage bucket, a BigQuery dataset, - or a Cloud Pub/Sub topic. A logs filter controls which log entries are - exported. The sink must be created within a project, organization, - billing account, or folder. - - - Attributes: - name: - Required. The client-assigned sink identifier, unique within - the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink - identifiers are limited to 100 characters and can include only - the following characters: upper and lower-case alphanumeric - characters, underscores, hyphens, and periods. - destination: - Required. The export destination: :: - "storage.googleapis.com/[GCS_BUCKET]" "bigquery.googleapis - .com/projects/[PROJECT_ID]/datasets/[DATASET]" "pubsub.goo - gleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" The - sink's ``writer_identity``, set when the sink is created, must - have permission to write to the destination or else the log - entries are not exported. For more information, see `Exporting - Logs with Sinks `__. - filter: - Optional. An `advanced logs filter - `__. The only exported - log entries are those that are in the resource owning the sink - and that match the filter. For example: :: - logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND - severity>=ERROR - output_version_format: - Deprecated. The log entry format to use for this sink's - exported log entries. The v2 format is used by default and - cannot be changed. - writer_identity: - Output only. An IAM identity—a service account or group—under - which Logging writes the exported log entries to the sink's - destination. This field is set by - [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - and - [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] - based on the value of ``unique_writer_identity`` in those - methods. Until you grant this identity write-access to the - destination, log entry exports from this sink will fail. For - more information, see `Granting Access for a Resource - `__. Consult the - destination service's documentation to determine the - appropriate IAM roles to assign to the identity. - include_children: - Optional. This field applies only to sinks owned by - organizations and folders. If the field is false, the default, - only the logs owned by the sink's parent resource are - available for export. If the field is true, then logs from all - the projects, folders, and billing accounts contained in the - sink's parent resource are also available for export. Whether - a particular log entry from the children is exported depends - on the sink's filter expression. For example, if this field is - true, then the filter ``resource.type=gce_instance`` would - export all Compute Engine VM instance log entries from all - projects in the sink's parent. To only export entries from - certain child projects, filter on the project part of the log - name: :: logName:("projects/test-project1/" OR - "projects/test-project2/") AND resource.type=gce_instance - options: - Optional. Destination dependent options. - bigquery_options: - Optional. Options that affect sinks exporting data to - BigQuery. - create_time: - Output only. The creation timestamp of the sink. This field - may not be present for older sinks. - update_time: - Output only. 
The last update timestamp of the sink. This - field may not be present for older sinks. - start_time: - Do not use. This field is ignored. - end_time: - Do not use. This field is ignored. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogSink) - ), -) -_sym_db.RegisterMessage(LogSink) - -BigQueryOptions = _reflection.GeneratedProtocolMessageType( - "BigQueryOptions", - (_message.Message,), - dict( - DESCRIPTOR=_BIGQUERYOPTIONS, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Options that change functionality of a sink exporting data to BigQuery. - - - Attributes: - use_partitioned_tables: - Optional. Whether to use `BigQuery's partition tables - `__. By default, Logging - creates dated tables based on the log entries' timestamps, - e.g. syslog\_20170523. With partitioned tables the date suffix - is no longer present and `special query syntax - `__ has to be used - instead. In both cases, tables are sharded based on UTC - timezone. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.BigQueryOptions) - ), -) -_sym_db.RegisterMessage(BigQueryOptions) - -ListSinksRequest = _reflection.GeneratedProtocolMessageType( - "ListSinksRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTSINKSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``ListSinks``. - - - Attributes: - parent: - Required. The parent resource whose sinks are to be listed: - :: "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksRequest) - ), -) -_sym_db.RegisterMessage(ListSinksRequest) - -ListSinksResponse = _reflection.GeneratedProtocolMessageType( - "ListSinksResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTSINKSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Result returned from ``ListSinks``. - - - Attributes: - sinks: - A list of sinks. - next_page_token: - If there might be more results than appear in this response, - then ``nextPageToken`` is included. To get the next set of - results, call the same method again using the value of - ``nextPageToken`` as ``pageToken``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksResponse) - ), -) -_sym_db.RegisterMessage(ListSinksResponse) - -GetSinkRequest = _reflection.GeneratedProtocolMessageType( - "GetSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETSINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``GetSink``. - - - Attributes: - sink_name: - Required. The resource name of the sink: :: - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: - ``"projects/my-project-id/sinks/my-sink-id"``. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.GetSinkRequest) - ), -) -_sym_db.RegisterMessage(GetSinkRequest) - -CreateSinkRequest = _reflection.GeneratedProtocolMessageType( - "CreateSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATESINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``CreateSink``. - - - Attributes: - parent: - Required. The resource in which to create the sink: :: - "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" Examples: ``"projects/my-logging- - project"``, ``"organizations/123456789"``. - sink: - Required. The new sink, whose ``name`` parameter is a sink - identifier that is not already in use. - unique_writer_identity: - Optional. Determines the kind of IAM identity returned as - ``writer_identity`` in the new sink. If this value is omitted - or set to false, and if the sink's parent is a project, then - the value returned as ``writer_identity`` is the same group or - service account used by Logging before the addition of writer - identities to this API. The sink's destination must be in the - same project as the sink itself. If this field is set to - true, or if the sink is owned by a non-project resource such - as an organization, then the value of ``writer_identity`` will - be a unique service account used only for exports from the new - sink. For more information, see ``writer_identity`` in - [LogSink][google.logging.v2.LogSink]. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.CreateSinkRequest) - ), -) -_sym_db.RegisterMessage(CreateSinkRequest) - -UpdateSinkRequest = _reflection.GeneratedProtocolMessageType( - "UpdateSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATESINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``UpdateSink``. - - - Attributes: - sink_name: - Required. The full resource name of the sink to update, - including the parent resource and the sink identifier: :: - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: - ``"projects/my-project-id/sinks/my-sink-id"``. - sink: - Required. The updated sink, whose name is the same identifier - that appears as part of ``sink_name``. - unique_writer_identity: - Optional. See - [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - for a description of this field. When updating a sink, the - effect of this field on the value of ``writer_identity`` in - the updated sink depends on both the old and new values of - this field: - If the old and new values of this field are - both false or both true, then there is no change to the - sink's ``writer_identity``. - If the old value is false and - the new value is true, then ``writer_identity`` is changed - to a unique service account. - It is an error if the old - value is true and the new value is set to false or - defaulted to false. - update_mask: - Optional. Field mask that specifies the fields in ``sink`` - that need an update. A sink field will be overwritten if, and - only if, it is in the update mask. ``name`` and output only - fields cannot be updated. 
An empty updateMask is temporarily - treated as using the following mask for backwards - compatibility purposes: destination,filter,includeChildren At - some point in the future, behavior will be removed and - specifying an empty updateMask will be an error. For a - detailed ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/ - google.protobuf#google.protobuf.FieldMask Example: - ``updateMask=filter``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateSinkRequest) - ), -) -_sym_db.RegisterMessage(UpdateSinkRequest) - -DeleteSinkRequest = _reflection.GeneratedProtocolMessageType( - "DeleteSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETESINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``DeleteSink``. - - - Attributes: - sink_name: - Required. The full resource name of the sink to delete, - including the parent resource and the sink identifier: :: - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: - ``"projects/my-project-id/sinks/my-sink-id"``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteSinkRequest) - ), -) -_sym_db.RegisterMessage(DeleteSinkRequest) - -LogExclusion = _reflection.GeneratedProtocolMessageType( - "LogExclusion", - (_message.Message,), - dict( - DESCRIPTOR=_LOGEXCLUSION, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Specifies a set of log entries that are not to be stored in Logging. If - your GCP resource receives a large volume of logs, you can use - exclusions to reduce your chargeable logs. Exclusions are processed - after log sinks, so you can export log entries before they are excluded. - Note that organization-level and folder-level exclusions don't apply to - child resources, and that you can't exclude audit log entries. - - - Attributes: - name: - Required. A client-assigned identifier, such as ``"load- - balancer-exclusion"``. Identifiers are limited to 100 - characters and can include only letters, digits, underscores, - hyphens, and periods. - description: - Optional. A description of this exclusion. - filter: - Required. An `advanced logs filter - `__ that matches the log - entries to be excluded. By using the `sample function - `__, you can - exclude less than 100% of the matching log entries. For - example, the following query matches 99% of low-severity log - entries from Google Cloud Storage buckets: - ``"resource.type=gcs_bucket severity=ERROR" - // - // The maximum length of the filter is 20000 characters. - string filter = 3; - - // Optional. The metric descriptor associated with the logs-based metric. - // If unspecified, it uses a default metric descriptor with a DELTA metric - // kind, INT64 value type, with no labels and a unit of "1". Such a metric - // counts the number of log entries matching the `filter` expression. - // - // The `name`, `type`, and `description` fields in the `metric_descriptor` - // are output only, and is constructed using the `name` and `description` - // field in the LogMetric. - // - // To create a logs-based metric that records a distribution of log values, a - // DELTA metric kind with a DISTRIBUTION value type must be used along with - // a `value_extractor` expression in the LogMetric. 
- // - // Each label in the metric descriptor must have a matching label - // name as the key and an extractor expression as the value in the - // `label_extractors` map. - // - // The `metric_kind` and `value_type` fields in the `metric_descriptor` cannot - // be updated once initially configured. New labels can be added in the - // `metric_descriptor`, but existing labels cannot be modified except for - // their description. - google.api.MetricDescriptor metric_descriptor = 5; - - // Optional. A `value_extractor` is required when using a distribution - // logs-based metric to extract the values to record from a log entry. - // Two functions are supported for value extraction: `EXTRACT(field)` or - // `REGEXP_EXTRACT(field, regex)`. The argument are: - // 1. field: The name of the log entry field from which the value is to be - // extracted. - // 2. regex: A regular expression using the Google RE2 syntax - // (https://github.com/google/re2/wiki/Syntax) with a single capture - // group to extract data from the specified log entry field. The value - // of the field is converted to a string before applying the regex. - // It is an error to specify a regex that does not include exactly one - // capture group. - // - // The result of the extraction must be convertible to a double type, as the - // distribution always records double values. If either the extraction or - // the conversion to double fails, then those values are not recorded in the - // distribution. - // - // Example: `REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")` - string value_extractor = 6; - - // Optional. A map from a label key string to an extractor expression which is - // used to extract data from a log entry field and assign as the label value. - // Each label key specified in the LabelDescriptor must have an associated - // extractor expression in this map. The syntax of the extractor expression - // is the same as for the `value_extractor` field. - // - // The extracted value is converted to the type defined in the label - // descriptor. If the either the extraction or the type conversion fails, - // the label will have a default value. The default value for a string - // label is an empty string, for an integer label its 0, and for a boolean - // label its `false`. - // - // Note that there are upper bounds on the maximum number of labels and the - // number of active time series that are allowed in a project. - map label_extractors = 7; - - // Optional. The `bucket_options` are required when the logs-based metric is - // using a DISTRIBUTION value type and it describes the bucket boundaries - // used to create a histogram of the extracted values. - google.api.Distribution.BucketOptions bucket_options = 8; - - // Output only. The creation timestamp of the metric. - // - // This field may not be present for older metrics. - google.protobuf.Timestamp create_time = 9; - - // Output only. The last update timestamp of the metric. - // - // This field may not be present for older metrics. - google.protobuf.Timestamp update_time = 10; - - // Deprecated. The API version that created or updated this metric. - // The v2 format is used by default and cannot be changed. - ApiVersion version = 4 [deprecated = true]; -} - -// The parameters to ListLogMetrics. -message ListLogMetricsRequest { - // Required. The name of the project containing the metrics: - // - // "projects/[PROJECT_ID]" - string parent = 1; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. 
`pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 2; - - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 3; -} - -// Result returned from ListLogMetrics. -message ListLogMetricsResponse { - // A list of logs-based metrics. - repeated LogMetric metrics = 1; - - // If there might be more results than appear in this response, then - // `nextPageToken` is included. To get the next set of results, call this - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to GetLogMetric. -message GetLogMetricRequest { - // The resource name of the desired metric: - // - // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - string metric_name = 1; -} - -// The parameters to CreateLogMetric. -message CreateLogMetricRequest { - // The resource name of the project in which to create the metric: - // - // "projects/[PROJECT_ID]" - // - // The new metric must be provided in the request. - string parent = 1; - - // The new logs-based metric, which must not have an identifier that - // already exists. - LogMetric metric = 2; -} - -// The parameters to UpdateLogMetric. -message UpdateLogMetricRequest { - // The resource name of the metric to update: - // - // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - // - // The updated metric must be provided in the request and it's - // `name` field must be the same as `[METRIC_ID]` If the metric - // does not exist in `[PROJECT_ID]`, then a new metric is created. - string metric_name = 1; - - // The updated metric. - LogMetric metric = 2; -} - -// The parameters to DeleteLogMetric. -message DeleteLogMetricRequest { - // The resource name of the metric to delete: - // - // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - string metric_name = 1; -} diff --git a/logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py deleted file mode 100644 index 1addc0a0b592..000000000000 --- a/logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py +++ /dev/null @@ -1,1018 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/logging_v2/proto/logging_metrics.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import distribution_pb2 as google_dot_api_dot_distribution__pb2 -from google.api import metric_pb2 as google_dot_api_dot_metric__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/logging_v2/proto/logging_metrics.proto", - package="google.logging.v2", - syntax="proto3", - serialized_options=_b( - "\n\025com.google.logging.v2B\023LoggingMetricsProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), - serialized_pb=_b( - '\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"\x93\x04\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12/\n\x0b\x63reate_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 
\x01(\t2\xe4\x07\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"/\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_distribution__pb2.DESCRIPTOR, - google_dot_api_dot_metric__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_LOGMETRIC_APIVERSION = _descriptor.EnumDescriptor( - name="ApiVersion", - full_name="google.logging.v2.LogMetric.ApiVersion", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="V2", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="V1", index=1, number=1, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=785, - serialized_end=813, -) -_sym_db.RegisterEnumDescriptor(_LOGMETRIC_APIVERSION) - - -_LOGMETRIC_LABELEXTRACTORSENTRY = _descriptor.Descriptor( - name="LabelExtractorsEntry", - full_name="google.logging.v2.LogMetric.LabelExtractorsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.LogMetric.LabelExtractorsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.LogMetric.LabelExtractorsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - 
is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=729, - serialized_end=783, -) - -_LOGMETRIC = _descriptor.Descriptor( - name="LogMetric", - full_name="google.logging.v2.LogMetric", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.LogMetric.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.logging.v2.LogMetric.description", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.LogMetric.filter", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric_descriptor", - full_name="google.logging.v2.LogMetric.metric_descriptor", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value_extractor", - full_name="google.logging.v2.LogMetric.value_extractor", - index=4, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="label_extractors", - full_name="google.logging.v2.LogMetric.label_extractors", - index=5, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_options", - full_name="google.logging.v2.LogMetric.bucket_options", - index=6, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.logging.v2.LogMetric.create_time", - index=7, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.logging.v2.LogMetric.update_time", - index=8, - number=10, - type=11, - cpp_type=10, - 
label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.logging.v2.LogMetric.version", - index=9, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY,], - enum_types=[_LOGMETRIC_APIVERSION,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=282, - serialized_end=813, -) - - -_LISTLOGMETRICSREQUEST = _descriptor.Descriptor( - name="ListLogMetricsRequest", - full_name="google.logging.v2.ListLogMetricsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListLogMetricsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListLogMetricsRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListLogMetricsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=815, - serialized_end=893, -) - - -_LISTLOGMETRICSRESPONSE = _descriptor.Descriptor( - name="ListLogMetricsResponse", - full_name="google.logging.v2.ListLogMetricsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metrics", - full_name="google.logging.v2.ListLogMetricsResponse.metrics", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListLogMetricsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, 
- is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=895, - serialized_end=991, -) - - -_GETLOGMETRICREQUEST = _descriptor.Descriptor( - name="GetLogMetricRequest", - full_name="google.logging.v2.GetLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metric_name", - full_name="google.logging.v2.GetLogMetricRequest.metric_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=993, - serialized_end=1035, -) - - -_CREATELOGMETRICREQUEST = _descriptor.Descriptor( - name="CreateLogMetricRequest", - full_name="google.logging.v2.CreateLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.CreateLogMetricRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric", - full_name="google.logging.v2.CreateLogMetricRequest.metric", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1037, - serialized_end=1123, -) - - -_UPDATELOGMETRICREQUEST = _descriptor.Descriptor( - name="UpdateLogMetricRequest", - full_name="google.logging.v2.UpdateLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metric_name", - full_name="google.logging.v2.UpdateLogMetricRequest.metric_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric", - full_name="google.logging.v2.UpdateLogMetricRequest.metric", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1125, - serialized_end=1216, -) - - -_DELETELOGMETRICREQUEST = _descriptor.Descriptor( - name="DeleteLogMetricRequest", - full_name="google.logging.v2.DeleteLogMetricRequest", - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metric_name", - full_name="google.logging.v2.DeleteLogMetricRequest.metric_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1218, - serialized_end=1263, -) - -_LOGMETRIC_LABELEXTRACTORSENTRY.containing_type = _LOGMETRIC -_LOGMETRIC.fields_by_name[ - "metric_descriptor" -].message_type = google_dot_api_dot_metric__pb2._METRICDESCRIPTOR -_LOGMETRIC.fields_by_name[ - "label_extractors" -].message_type = _LOGMETRIC_LABELEXTRACTORSENTRY -_LOGMETRIC.fields_by_name[ - "bucket_options" -].message_type = google_dot_api_dot_distribution__pb2._DISTRIBUTION_BUCKETOPTIONS -_LOGMETRIC.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGMETRIC.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGMETRIC.fields_by_name["version"].enum_type = _LOGMETRIC_APIVERSION -_LOGMETRIC_APIVERSION.containing_type = _LOGMETRIC -_LISTLOGMETRICSRESPONSE.fields_by_name["metrics"].message_type = _LOGMETRIC -_CREATELOGMETRICREQUEST.fields_by_name["metric"].message_type = _LOGMETRIC -_UPDATELOGMETRICREQUEST.fields_by_name["metric"].message_type = _LOGMETRIC -DESCRIPTOR.message_types_by_name["LogMetric"] = _LOGMETRIC -DESCRIPTOR.message_types_by_name["ListLogMetricsRequest"] = _LISTLOGMETRICSREQUEST -DESCRIPTOR.message_types_by_name["ListLogMetricsResponse"] = _LISTLOGMETRICSRESPONSE -DESCRIPTOR.message_types_by_name["GetLogMetricRequest"] = _GETLOGMETRICREQUEST -DESCRIPTOR.message_types_by_name["CreateLogMetricRequest"] = _CREATELOGMETRICREQUEST -DESCRIPTOR.message_types_by_name["UpdateLogMetricRequest"] = _UPDATELOGMETRICREQUEST -DESCRIPTOR.message_types_by_name["DeleteLogMetricRequest"] = _DELETELOGMETRICREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LogMetric = _reflection.GeneratedProtocolMessageType( - "LogMetric", - (_message.Message,), - dict( - LabelExtractorsEntry=_reflection.GeneratedProtocolMessageType( - "LabelExtractorsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_LOGMETRIC_LABELEXTRACTORSENTRY, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric.LabelExtractorsEntry) - ), - ), - DESCRIPTOR=_LOGMETRIC, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""Describes a logs-based metric. The value of the metric is the number of - log entries that match a logs filter in a given time interval. - - Logs-based metric can also be used to extract values from logs and - create a a distribution of the values. The distribution records the - statistics of the extracted values along with an optional histogram of - the values as specified by the bucket options. - - - Attributes: - name: - Required. The client-assigned metric identifier. Examples: - ``"error_count"``, ``"nginx/requests"``. Metric identifiers - are limited to 100 characters and can include only the - following characters: ``A-Z``, ``a-z``, ``0-9``, and the - special characters ``_-.,+!*',()%/``. 
The forward-slash - character (``/``) denotes a hierarchy of name pieces, and it - cannot be the first character of the name. The metric - identifier in this field must not be `URL-encoded - `__. However, - when the metric identifier appears as the ``[METRIC_ID]`` part - of a ``metric_name`` API parameter, then the metric identifier - must be URL-encoded. Example: ``"projects/my- - project/metrics/nginx%2Frequests"``. - description: - Optional. A description of this metric, which is used in - documentation. The maximum length of the description is 8000 - characters. - filter: - Required. An `advanced logs filter - `__ which is used to - match log entries. Example: :: "resource.type=gae_app - AND severity>=ERROR" The maximum length of the filter is - 20000 characters. - metric_descriptor: - Optional. The metric descriptor associated with the logs-based - metric. If unspecified, it uses a default metric descriptor - with a DELTA metric kind, INT64 value type, with no labels and - a unit of "1". Such a metric counts the number of log entries - matching the ``filter`` expression. The ``name``, ``type``, - and ``description`` fields in the ``metric_descriptor`` are - output only, and is constructed using the ``name`` and - ``description`` field in the LogMetric. To create a logs- - based metric that records a distribution of log values, a - DELTA metric kind with a DISTRIBUTION value type must be used - along with a ``value_extractor`` expression in the LogMetric. - Each label in the metric descriptor must have a matching label - name as the key and an extractor expression as the value in - the ``label_extractors`` map. The ``metric_kind`` and - ``value_type`` fields in the ``metric_descriptor`` cannot be - updated once initially configured. New labels can be added in - the ``metric_descriptor``, but existing labels cannot be - modified except for their description. - value_extractor: - Optional. A ``value_extractor`` is required when using a - distribution logs-based metric to extract the values to record - from a log entry. Two functions are supported for value - extraction: ``EXTRACT(field)`` or ``REGEXP_EXTRACT(field, - regex)``. The argument are: 1. field: The name of the log - entry field from which the value is to be extracted. 2. regex: - A regular expression using the Google RE2 syntax - (https://github.com/google/re2/wiki/Syntax) with a single - capture group to extract data from the specified log entry - field. The value of the field is converted to a string before - applying the regex. It is an error to specify a regex that - does not include exactly one capture group. The result of the - extraction must be convertible to a double type, as the - distribution always records double values. If either the - extraction or the conversion to double fails, then those - values are not recorded in the distribution. Example: - ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` - label_extractors: - Optional. A map from a label key string to an extractor - expression which is used to extract data from a log entry - field and assign as the label value. Each label key specified - in the LabelDescriptor must have an associated extractor - expression in this map. The syntax of the extractor expression - is the same as for the ``value_extractor`` field. The - extracted value is converted to the type defined in the label - descriptor. If the either the extraction or the type - conversion fails, the label will have a default value. 
The - default value for a string label is an empty string, for an - integer label it is 0, and for a boolean label it is ``false``. - Note that there are upper bounds on the maximum number of - labels and the number of active time series that are allowed - in a project. - bucket_options: - Optional. The ``bucket_options`` are required when the logs- - based metric is using a DISTRIBUTION value type and it - describes the bucket boundaries used to create a histogram of - the extracted values. - create_time: - Output only. The creation timestamp of the metric. This field - may not be present for older metrics. - update_time: - Output only. The last update timestamp of the metric. This - field may not be present for older metrics. - version: - Deprecated. The API version that created or updated this - metric. The v2 format is used by default and cannot be - changed. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric) - ), -) -_sym_db.RegisterMessage(LogMetric) -_sym_db.RegisterMessage(LogMetric.LabelExtractorsEntry) - -ListLogMetricsRequest = _reflection.GeneratedProtocolMessageType( - "ListLogMetricsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGMETRICSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to ListLogMetrics. - - - Attributes: - parent: - Required. The name of the project containing the metrics: :: - "projects/[PROJECT_ID]" - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsRequest) - ), -) -_sym_db.RegisterMessage(ListLogMetricsRequest) - -ListLogMetricsResponse = _reflection.GeneratedProtocolMessageType( - "ListLogMetricsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGMETRICSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""Result returned from ListLogMetrics. - - - Attributes: - metrics: - A list of logs-based metrics. - next_page_token: - If there might be more results than appear in this response, - then ``nextPageToken`` is included. To get the next set of - results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsResponse) - ), -) -_sym_db.RegisterMessage(ListLogMetricsResponse) - -GetLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "GetLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETLOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to GetLogMetric.
- - - Attributes: - metric_name: - The resource name of the desired metric: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.GetLogMetricRequest) - ), -) -_sym_db.RegisterMessage(GetLogMetricRequest) - -CreateLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "CreateLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATELOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to CreateLogMetric. - - - Attributes: - parent: - The resource name of the project in which to create the - metric: :: "projects/[PROJECT_ID]" The new metric must - be provided in the request. - metric: - The new logs-based metric, which must not have an identifier - that already exists. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.CreateLogMetricRequest) - ), -) -_sym_db.RegisterMessage(CreateLogMetricRequest) - -UpdateLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "UpdateLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATELOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to UpdateLogMetric. - - - Attributes: - metric_name: - The resource name of the metric to update: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" The updated - metric must be provided in the request and its ``name`` field - must be the same as ``[METRIC_ID]``. If the metric does not - exist in ``[PROJECT_ID]``, then a new metric is created. - metric: - The updated metric. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateLogMetricRequest) - ), -) -_sym_db.RegisterMessage(UpdateLogMetricRequest) - -DeleteLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "DeleteLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETELOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to DeleteLogMetric.
- - - Attributes: - metric_name: - The resource name of the metric to delete: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogMetricRequest) - ), -) -_sym_db.RegisterMessage(DeleteLogMetricRequest) - - -DESCRIPTOR._options = None -_LOGMETRIC_LABELEXTRACTORSENTRY._options = None -_LOGMETRIC.fields_by_name["version"]._options = None - -_METRICSSERVICEV2 = _descriptor.ServiceDescriptor( - name="MetricsServiceV2", - full_name="google.logging.v2.MetricsServiceV2", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" - ), - serialized_start=1266, - serialized_end=2262, - methods=[ - _descriptor.MethodDescriptor( - name="ListLogMetrics", - full_name="google.logging.v2.MetricsServiceV2.ListLogMetrics", - index=0, - containing_service=None, - input_type=_LISTLOGMETRICSREQUEST, - output_type=_LISTLOGMETRICSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002!\022\037/v2/{parent=projects/*}/metrics" - ), - ), - _descriptor.MethodDescriptor( - name="GetLogMetric", - full_name="google.logging.v2.MetricsServiceV2.GetLogMetric", - index=1, - containing_service=None, - input_type=_GETLOGMETRICREQUEST, - output_type=_LOGMETRIC, - serialized_options=_b( - "\202\323\344\223\002(\022&/v2/{metric_name=projects/*/metrics/*}" - ), - ), - _descriptor.MethodDescriptor( - name="CreateLogMetric", - full_name="google.logging.v2.MetricsServiceV2.CreateLogMetric", - index=2, - containing_service=None, - input_type=_CREATELOGMETRICREQUEST, - output_type=_LOGMETRIC, - serialized_options=_b( - '\202\323\344\223\002)"\037/v2/{parent=projects/*}/metrics:\006metric' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateLogMetric", - full_name="google.logging.v2.MetricsServiceV2.UpdateLogMetric", - index=3, - containing_service=None, - input_type=_UPDATELOGMETRICREQUEST, - output_type=_LOGMETRIC, - serialized_options=_b( - "\202\323\344\223\0020\032&/v2/{metric_name=projects/*/metrics/*}:\006metric" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteLogMetric", - full_name="google.logging.v2.MetricsServiceV2.DeleteLogMetric", - index=4, - containing_service=None, - input_type=_DELETELOGMETRICREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002(*&/v2/{metric_name=projects/*/metrics/*}" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_METRICSSERVICEV2) - -DESCRIPTOR.services_by_name["MetricsServiceV2"] = _METRICSSERVICEV2 - -# @@protoc_insertion_point(module_scope) diff --git a/logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py b/logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py deleted file mode 100644 index 09f84e038a1b..000000000000 --- a/logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py +++ /dev/null @@ -1,118 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.logging_v2.proto import ( - logging_metrics_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class MetricsServiceV2Stub(object): - """Service for configuring logs-based metrics. 
- """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.ListLogMetrics = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/ListLogMetrics", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.FromString, - ) - self.GetLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/GetLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, - ) - self.CreateLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/CreateLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, - ) - self.UpdateLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, - ) - self.DeleteLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - -class MetricsServiceV2Servicer(object): - """Service for configuring logs-based metrics. - """ - - def ListLogMetrics(self, request, context): - """Lists logs-based metrics. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetLogMetric(self, request, context): - """Gets a logs-based metric. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateLogMetric(self, request, context): - """Creates a logs-based metric. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateLogMetric(self, request, context): - """Creates or updates a logs-based metric. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteLogMetric(self, request, context): - """Deletes a logs-based metric. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_MetricsServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - "ListLogMetrics": grpc.unary_unary_rpc_method_handler( - servicer.ListLogMetrics, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.SerializeToString, - ), - "GetLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.GetLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - "CreateLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.CreateLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - "UpdateLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.UpdateLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - "DeleteLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.DeleteLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.logging.v2.MetricsServiceV2", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/logging/google/cloud/logging_v2/proto/logging_pb2.py b/logging/google/cloud/logging_v2/proto/logging_pb2.py deleted file mode 100644 index 79a73bd0f5fc..000000000000 --- a/logging/google/cloud/logging_v2/proto/logging_pb2.py +++ /dev/null @@ -1,1312 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/logging_v2/proto/logging.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.cloud.logging_v2.proto import ( - log_entry_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2, -) -from google.cloud.logging_v2.proto import ( - logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/logging_v2/proto/logging.proto", - package="google.logging.v2", - syntax="proto3", - serialized_options=_b( - "\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), - serialized_pb=_b( - '\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x32google/cloud/logging_v2/proto/logging_config.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 
\x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x85\n\n\x10LoggingServiceV2\x12\x88\x02\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xbd\x01\x82\xd3\xe4\x93\x02\xb6\x01* /v2/{log_name=projects/*/logs/*}Z\x1b*\x19/v2/{log_name=*/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"\x1c\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"\x1b\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 /v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logs\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2.DESCRIPTOR, - google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_DELETELOGREQUEST = _descriptor.Descriptor( - name="DeleteLogRequest", - full_name="google.logging.v2.DeleteLogRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_name", - full_name="google.logging.v2.DeleteLogRequest.log_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=376, - serialized_end=412, -) - - -_WRITELOGENTRIESREQUEST_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=667, - serialized_end=712, -) - -_WRITELOGENTRIESREQUEST = _descriptor.Descriptor( - name="WriteLogEntriesRequest", - full_name="google.logging.v2.WriteLogEntriesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_name", - full_name="google.logging.v2.WriteLogEntriesRequest.log_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource", - full_name="google.logging.v2.WriteLogEntriesRequest.resource", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.logging.v2.WriteLogEntriesRequest.labels", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="entries", - full_name="google.logging.v2.WriteLogEntriesRequest.entries", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="partial_success", - full_name="google.logging.v2.WriteLogEntriesRequest.partial_success", - index=4, - number=5, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="dry_run", - 
full_name="google.logging.v2.WriteLogEntriesRequest.dry_run", - index=5, - number=6, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=415, - serialized_end=712, -) - - -_WRITELOGENTRIESRESPONSE = _descriptor.Descriptor( - name="WriteLogEntriesResponse", - full_name="google.logging.v2.WriteLogEntriesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=714, - serialized_end=739, -) - - -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY = _descriptor.Descriptor( - name="LogEntryErrorsEntry", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.key", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=869, - serialized_end=942, -) - -_WRITELOGENTRIESPARTIALERRORS = _descriptor.Descriptor( - name="WriteLogEntriesPartialErrors", - full_name="google.logging.v2.WriteLogEntriesPartialErrors", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_entry_errors", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.log_entry_errors", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=742, - serialized_end=942, -) - - -_LISTLOGENTRIESREQUEST = _descriptor.Descriptor( - name="ListLogEntriesRequest", - full_name="google.logging.v2.ListLogEntriesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_ids", - full_name="google.logging.v2.ListLogEntriesRequest.project_ids", - index=0, - number=1, - type=9, - 
cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource_names", - full_name="google.logging.v2.ListLogEntriesRequest.resource_names", - index=1, - number=8, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.ListLogEntriesRequest.filter", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="order_by", - full_name="google.logging.v2.ListLogEntriesRequest.order_by", - index=3, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListLogEntriesRequest.page_size", - index=4, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListLogEntriesRequest.page_token", - index=5, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=945, - serialized_end=1090, -) - - -_LISTLOGENTRIESRESPONSE = _descriptor.Descriptor( - name="ListLogEntriesResponse", - full_name="google.logging.v2.ListLogEntriesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="entries", - full_name="google.logging.v2.ListLogEntriesResponse.entries", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListLogEntriesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - 
syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1092, - serialized_end=1187, -) - - -_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST = _descriptor.Descriptor( - name="ListMonitoredResourceDescriptorsRequest", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_size", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1189, - serialized_end=1269, -) - - -_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE = _descriptor.Descriptor( - name="ListMonitoredResourceDescriptorsResponse", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="resource_descriptors", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse.resource_descriptors", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1272, - serialized_end=1410, -) - - -_LISTLOGSREQUEST = _descriptor.Descriptor( - name="ListLogsRequest", - full_name="google.logging.v2.ListLogsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListLogsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListLogsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListLogsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1412, - serialized_end=1484, -) - - -_LISTLOGSRESPONSE = _descriptor.Descriptor( - name="ListLogsResponse", - full_name="google.logging.v2.ListLogsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_names", - full_name="google.logging.v2.ListLogsResponse.log_names", - index=0, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListLogsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1486, - serialized_end=1548, -) - -_WRITELOGENTRIESREQUEST_LABELSENTRY.containing_type = _WRITELOGENTRIESREQUEST -_WRITELOGENTRIESREQUEST.fields_by_name[ - "resource" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE -_WRITELOGENTRIESREQUEST.fields_by_name[ - "labels" -].message_type = _WRITELOGENTRIESREQUEST_LABELSENTRY -_WRITELOGENTRIESREQUEST.fields_by_name[ - "entries" -].message_type = ( - google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY -) -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.fields_by_name[ - "value" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.containing_type = ( - _WRITELOGENTRIESPARTIALERRORS -) -_WRITELOGENTRIESPARTIALERRORS.fields_by_name[ - "log_entry_errors" -].message_type = _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY -_LISTLOGENTRIESRESPONSE.fields_by_name[ - "entries" -].message_type = ( - google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY -) -_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE.fields_by_name[ - "resource_descriptors" -].message_type = ( - google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEDESCRIPTOR -) -DESCRIPTOR.message_types_by_name["DeleteLogRequest"] = _DELETELOGREQUEST -DESCRIPTOR.message_types_by_name["WriteLogEntriesRequest"] = _WRITELOGENTRIESREQUEST -DESCRIPTOR.message_types_by_name["WriteLogEntriesResponse"] = _WRITELOGENTRIESRESPONSE -DESCRIPTOR.message_types_by_name[ - "WriteLogEntriesPartialErrors" -] = _WRITELOGENTRIESPARTIALERRORS 
-DESCRIPTOR.message_types_by_name["ListLogEntriesRequest"] = _LISTLOGENTRIESREQUEST -DESCRIPTOR.message_types_by_name["ListLogEntriesResponse"] = _LISTLOGENTRIESRESPONSE -DESCRIPTOR.message_types_by_name[ - "ListMonitoredResourceDescriptorsRequest" -] = _LISTMONITOREDRESOURCEDESCRIPTORSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListMonitoredResourceDescriptorsResponse" -] = _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE -DESCRIPTOR.message_types_by_name["ListLogsRequest"] = _LISTLOGSREQUEST -DESCRIPTOR.message_types_by_name["ListLogsResponse"] = _LISTLOGSRESPONSE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -DeleteLogRequest = _reflection.GeneratedProtocolMessageType( - "DeleteLogRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETELOGREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to DeleteLog. - - - Attributes: - log_name: - Required. The resource name of the log to delete: :: - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- - encoded. For example, ``"projects/my-project- - id/logs/syslog"``, ``"organizations/1234567890/logs/cloudresou - rcemanager.googleapis.com%2Factivity"``. For more information - about log names, see [LogEntry][google.logging.v2.LogEntry]. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogRequest) - ), -) -_sym_db.RegisterMessage(DeleteLogRequest) - -WriteLogEntriesRequest = _reflection.GeneratedProtocolMessageType( - "WriteLogEntriesRequest", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_WRITELOGENTRIESREQUEST_LABELSENTRY, - __module__="google.cloud.logging_v2.proto.logging_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest.LabelsEntry) - ), - ), - DESCRIPTOR=_WRITELOGENTRIESREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to WriteLogEntries. - - - Attributes: - log_name: - Optional. A default log resource name that is assigned to all - log entries in ``entries`` that do not specify a value for - ``log_name``: :: "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- - encoded. For example: :: "projects/my-project- - id/logs/syslog" "organizations/1234567890/logs/cloudresour - cemanager.googleapis.com%2Factivity" The permission - logging.logEntries.create is needed on each project, - organization, billing account, or folder that is receiving new - log entries, whether the resource is specified in logName or - in an individual log entry. - resource: - Optional. A default monitored resource object that is assigned - to all log entries in ``entries`` that do not specify a value - for ``resource``. Example: :: { "type": "gce_instance", - "labels": { "zone": "us-central1-a", "instance_id": - "00000000000000000000" }} See - [LogEntry][google.logging.v2.LogEntry]. - labels: - Optional. Default labels that are added to the ``labels`` - field of all log entries in ``entries``. If a log entry - already has a label with the same key as a label in this - parameter, then the log entry's label is not changed. See - [LogEntry][google.logging.v2.LogEntry]. - entries: - Required. The log entries to send to Logging. 
The order of log - entries in this list does not matter. Values supplied in this - method's ``log_name``, ``resource``, and ``labels`` fields are - copied into those log entries in this list that do not include - values for their corresponding fields. For more information, - see the [LogEntry][google.logging.v2.LogEntry] type. If the - ``timestamp`` or ``insert_id`` fields are missing in log - entries, then this method supplies the current time or a - unique identifier, respectively. The supplied values are - chosen so that, among the log entries that did not supply - their own values, the entries earlier in the list will sort - before the entries later in the list. See the ``entries.list`` - method. Log entries with timestamps that are more than the - `logs retention period `__ in the past - or more than 24 hours in the future will not be available when - calling ``entries.list``. However, those log entries can still - be `exported with LogSinks `__. To improve throughput and to avoid exceeding the - `quota limit `__ for calls to - ``entries.write``, you should try to include several log - entries in this list, rather than calling this method for each - individual log entry. - partial_success: - Optional. Whether valid entries should be written even if some - other entries fail due to INVALID\_ARGUMENT or - PERMISSION\_DENIED errors. If any entry is not written, then - the response status is the error associated with one of the - failed entries and the response includes error details keyed - by the entries' zero-based index in the ``entries.write`` - method. - dry_run: - Optional. If true, the request should expect normal response, - but the entries won't be persisted nor exported. Useful for - checking whether the logging API endpoints are working - properly before sending valuable data. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest) - ), -) -_sym_db.RegisterMessage(WriteLogEntriesRequest) -_sym_db.RegisterMessage(WriteLogEntriesRequest.LabelsEntry) - -WriteLogEntriesResponse = _reflection.GeneratedProtocolMessageType( - "WriteLogEntriesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_WRITELOGENTRIESRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from WriteLogEntries. empty - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesResponse) - ), -) -_sym_db.RegisterMessage(WriteLogEntriesResponse) - -WriteLogEntriesPartialErrors = _reflection.GeneratedProtocolMessageType( - "WriteLogEntriesPartialErrors", - (_message.Message,), - dict( - LogEntryErrorsEntry=_reflection.GeneratedProtocolMessageType( - "LogEntryErrorsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY, - __module__="google.cloud.logging_v2.proto.logging_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry) - ), - ), - DESCRIPTOR=_WRITELOGENTRIESPARTIALERRORS, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Error details for WriteLogEntries with partial success. - - - Attributes: - log_entry_errors: - When ``WriteLogEntriesRequest.partial_success`` is true, - records the error status for entries that were not written due - to a permanent error, keyed by the entry's zero-based index in - ``WriteLogEntriesRequest.entries``. Failed requests for which - no entries are written will not include per-entry errors. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors) - ), -) -_sym_db.RegisterMessage(WriteLogEntriesPartialErrors) -_sym_db.RegisterMessage(WriteLogEntriesPartialErrors.LogEntryErrorsEntry) - -ListLogEntriesRequest = _reflection.GeneratedProtocolMessageType( - "ListLogEntriesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGENTRIESREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to ``ListLogEntries``. - - - Attributes: - project_ids: - Deprecated. Use ``resource_names`` instead. One or more - project identifiers or project numbers from which to retrieve - log entries. Example: ``"my-project-1A"``. - resource_names: - Required. Names of one or more parent resources from which to - retrieve log entries: :: "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" Projects listed in the ``project_ids`` - field are added to this list. - filter: - Optional. A filter that chooses which log entries to return. - See `Advanced Logs Filters - `__. Only log entries - that match the filter are returned. An empty filter matches - all log entries in the resources listed in ``resource_names``. - Referencing a parent resource that is not listed in - ``resource_names`` will cause the filter to return no results. - The maximum length of the filter is 20000 characters. - order_by: - Optional. How the results should be sorted. Presently, the - only permitted values are ``"timestamp asc"`` (default) and - ``"timestamp desc"``. The first option returns entries in - order of increasing values of ``LogEntry.timestamp`` (oldest - first), and the second option returns entries in order of - decreasing timestamps (newest first). Entries with equal - timestamps are returned in order of their ``insert_id`` - values. - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``next_page_token`` in the response indicates that more - results might be available. - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``page_token`` must be - the value of ``next_page_token`` from the previous response. - The values of other method parameters should be identical to - those in the previous call. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesRequest) - ), -) -_sym_db.RegisterMessage(ListLogEntriesRequest) - -ListLogEntriesResponse = _reflection.GeneratedProtocolMessageType( - "ListLogEntriesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGENTRIESRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from ``ListLogEntries``. - - - Attributes: - entries: - A list of log entries. If ``entries`` is empty, - ``nextPageToken`` may still be returned, indicating that more - entries may exist. See ``nextPageToken`` for more information. - next_page_token: - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the next - set of results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. If a value for - ``next_page_token`` appears and the ``entries`` field is - empty, it means that the search found no log entries so far - but it did not have time to search all the possible log - entries. 
Retry the method with this value for ``page_token`` - to continue the search. Alternatively, consider speeding up - the search by changing your filter to specify a single log - name or resource type, or to narrow the time range of the - search. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesResponse) - ), -) -_sym_db.RegisterMessage(ListLogEntriesResponse) - -ListMonitoredResourceDescriptorsRequest = _reflection.GeneratedProtocolMessageType( - "ListMonitoredResourceDescriptorsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to ListMonitoredResourceDescriptors - - - Attributes: - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsRequest) - ), -) -_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsRequest) - -ListMonitoredResourceDescriptorsResponse = _reflection.GeneratedProtocolMessageType( - "ListMonitoredResourceDescriptorsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from ListMonitoredResourceDescriptors. - - - Attributes: - resource_descriptors: - A list of resource descriptors. - next_page_token: - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the next - set of results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsResponse) - ), -) -_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsResponse) - -ListLogsRequest = _reflection.GeneratedProtocolMessageType( - "ListLogsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to ListLogs. - - - Attributes: - parent: - Required. The resource name that owns the logs: :: - "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsRequest) - ), -) -_sym_db.RegisterMessage(ListLogsRequest) - -ListLogsResponse = _reflection.GeneratedProtocolMessageType( - "ListLogsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from ListLogs. - - - Attributes: - log_names: - A list of log names. For example, ``"projects/my- - project/logs/syslog"`` or ``"organizations/123/logs/cloudresou - rcemanager.googleapis.com%2Factivity"``. - next_page_token: - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the next - set of results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsResponse) - ), -) -_sym_db.RegisterMessage(ListLogsResponse) - - -DESCRIPTOR._options = None -_WRITELOGENTRIESREQUEST_LABELSENTRY._options = None -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY._options = None -_LISTLOGENTRIESREQUEST.fields_by_name["project_ids"]._options = None - -_LOGGINGSERVICEV2 = _descriptor.ServiceDescriptor( - name="LoggingServiceV2", - full_name="google.logging.v2.LoggingServiceV2", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" - ), - serialized_start=1551, - serialized_end=2836, - methods=[ - _descriptor.MethodDescriptor( - name="DeleteLog", - full_name="google.logging.v2.LoggingServiceV2.DeleteLog", - index=0, - containing_service=None, - input_type=_DELETELOGREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002\266\001* /v2/{log_name=projects/*/logs/*}Z\033*\031/v2/{log_name=*/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}" - ), - ), - _descriptor.MethodDescriptor( - name="WriteLogEntries", - full_name="google.logging.v2.LoggingServiceV2.WriteLogEntries", - index=1, - containing_service=None, - input_type=_WRITELOGENTRIESREQUEST, - output_type=_WRITELOGENTRIESRESPONSE, - serialized_options=_b( - '\202\323\344\223\002\026"\021/v2/entries:write:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="ListLogEntries", - full_name="google.logging.v2.LoggingServiceV2.ListLogEntries", - index=2, - containing_service=None, - input_type=_LISTLOGENTRIESREQUEST, - output_type=_LISTLOGENTRIESRESPONSE, - serialized_options=_b( - '\202\323\344\223\002\025"\020/v2/entries:list:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="ListMonitoredResourceDescriptors", - full_name="google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors", - index=3, - containing_service=None, - input_type=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, - output_type=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, - serialized_options=_b( - '\202\323\344\223\002"\022 /v2/monitoredResourceDescriptors' - ), - ), - _descriptor.MethodDescriptor( - name="ListLogs", - full_name="google.logging.v2.LoggingServiceV2.ListLogs", - index=4, - containing_service=None, - input_type=_LISTLOGSREQUEST, - output_type=_LISTLOGSRESPONSE, - serialized_options=_b( - 
"\202\323\344\223\002\242\001\022\025/v2/{parent=*/*}/logsZ\036\022\034/v2/{parent=projects/*}/logsZ#\022!/v2/{parent=organizations/*}/logsZ\035\022\033/v2/{parent=folders/*}/logsZ%\022#/v2/{parent=billingAccounts/*}/logs" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_LOGGINGSERVICEV2) - -DESCRIPTOR.services_by_name["LoggingServiceV2"] = _LOGGINGSERVICEV2 - -# @@protoc_insertion_point(module_scope) diff --git a/logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py b/logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py deleted file mode 100644 index 2a2b3656925c..000000000000 --- a/logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py +++ /dev/null @@ -1,130 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.logging_v2.proto import ( - logging_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class LoggingServiceV2Stub(object): - """Service for ingesting and querying logs. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.DeleteLog = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/DeleteLog", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.WriteLogEntries = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/WriteLogEntries", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.FromString, - ) - self.ListLogEntries = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/ListLogEntries", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.FromString, - ) - self.ListMonitoredResourceDescriptors = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.FromString, - ) - self.ListLogs = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/ListLogs", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.FromString, - ) - - -class LoggingServiceV2Servicer(object): - """Service for ingesting and querying logs. - """ - - def DeleteLog(self, request, context): - """Deletes all the log entries in a log. - The log reappears if it receives new entries. - Log entries written shortly before the delete operation might not be - deleted. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def WriteLogEntries(self, request, context): - """Writes log entries to Logging. This API method is the - only way to send log entries to Logging. 
This method - is used, directly or indirectly, by the Logging agent - (fluentd) and all logging libraries configured to use Logging. - A single request may contain log entries for a maximum of 1000 - different resources (projects, organizations, billing accounts or - folders) - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListLogEntries(self, request, context): - """Lists log entries. Use this method to retrieve log entries that originated - from a project/folder/organization/billing account. For ways to export log - entries, see [Exporting Logs](/logging/docs/export). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListMonitoredResourceDescriptors(self, request, context): - """Lists the descriptors for monitored resource types used by Logging. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListLogs(self, request, context): - """Lists the logs in projects, organizations, folders, or billing accounts. - Only logs that have entries are listed. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_LoggingServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - "DeleteLog": grpc.unary_unary_rpc_method_handler( - servicer.DeleteLog, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "WriteLogEntries": grpc.unary_unary_rpc_method_handler( - servicer.WriteLogEntries, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.SerializeToString, - ), - "ListLogEntries": grpc.unary_unary_rpc_method_handler( - servicer.ListLogEntries, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.SerializeToString, - ), - "ListMonitoredResourceDescriptors": grpc.unary_unary_rpc_method_handler( - servicer.ListMonitoredResourceDescriptors, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.SerializeToString, - ), - "ListLogs": grpc.unary_unary_rpc_method_handler( - servicer.ListLogs, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.logging.v2.LoggingServiceV2", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/logging/google/cloud/logging_v2/types.py b/logging/google/cloud/logging_v2/types.py deleted file mode 100644 index 464edbe709dc..000000000000 --- 
a/logging/google/cloud/logging_v2/types.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -import sys - -from google.api import distribution_pb2 -from google.api import http_pb2 -from google.api import label_pb2 -from google.api import metric_pb2 -from google.api import monitored_resource_pb2 -from google.logging.type import http_request_pb2 -from google.protobuf import any_pb2 -from google.protobuf import descriptor_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import struct_pb2 -from google.protobuf import timestamp_pb2 -from google.rpc import status_pb2 - -from google.api_core.protobuf_helpers import get_messages -from google.cloud.logging_v2.proto import log_entry_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2 -from google.cloud.logging_v2.proto import logging_metrics_pb2 -from google.cloud.logging_v2.proto import logging_pb2 - - -_shared_modules = [ - distribution_pb2, - http_pb2, - label_pb2, - metric_pb2, - monitored_resource_pb2, - http_request_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - struct_pb2, - timestamp_pb2, - status_pb2, -] - -_local_modules = [log_entry_pb2, logging_config_pb2, logging_metrics_pb2, logging_pb2] - -names = [] - -for module in _shared_modules: - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) - -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.logging_v2.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - -__all__ = tuple(sorted(names)) diff --git a/logging/noxfile.py b/logging/noxfile.py deleted file mode 100644 index d2a53d2de4fb..000000000000 --- a/logging/noxfile.py +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -import os -import shutil -import sys - -import nox - - -LOCAL_DEPS = ( - os.path.join('..', 'api_core'), - os.path.join('..', 'core'), -) -UNIT_TEST_DEPS = ( - 'mock', - 'pytest', - 'pytest-cov', - 'flask', - 'webapp2', - 'webob', -) - - -@nox.session(python="3.7") -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", "black", *LOCAL_DEPS) - session.run( - "black", - "--check", - "google", - "tests", - "docs", - ) - session.run("flake8", "google", "tests") - - -@nox.session(python="3.6") -def blacken(session): - """Run black. - - Format code to uniform standard. - """ - session.install("black") - session.run( - "black", - "google", - "tests", - "docs", - ) - - -@nox.session(python="3.7") -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def default(session, django_dep=('django',)): - """Default unit test session. - """ - - # Install all test dependencies, then install this package in-place. - deps = UNIT_TEST_DEPS - deps += django_dep - - session.install(*deps) - for local_dep in LOCAL_DEPS: - session.install('-e', local_dep) - session.install('-e', '.') - - # Run py.test against the unit tests. - session.run( - 'py.test', - '--quiet', - '--cov=google.cloud.logging', - '--cov=tests.unit', - '--cov-append', - '--cov-config=.coveragerc', - '--cov-report=', - '--cov-fail-under=97', - 'tests/unit', - *session.posargs - ) - - -@nox.session(python=['2.7', '3.5', '3.6', '3.7']) -def unit(session): - """Run the unit test suite.""" - - # Testing multiple version of django - # See https://www.djangoproject.com/download/ for supported version - django_deps_27 = [ - ('django==1.8.19',), - ('django >= 1.11.0, < 2.0.0dev',), - ] - - if session.virtualenv.interpreter == '2.7': - [default(session, django_dep=django) for django in django_deps_27] - else: - default(session) - - -@nox.session(python=['2.7', '3.6']) -def system(session): - """Run the system test suite.""" - - # Sanity check: Only run system tests if the environment variable is set. - if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - session.skip('Credentials must be set via environment variable.') - - # Use pre-release gRPC for system tests. - session.install('--pre', 'grpcio') - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install('mock', 'pytest') - for local_dep in LOCAL_DEPS: - session.install('-e', local_dep) - systest_deps = [ - '../bigquery/', - '../pubsub/', - '../storage/', - '../test_utils/', - ] - for systest_dep in systest_deps: - session.install('-e', systest_dep) - session.install('-e', '.') - - # Run py.test against the system tests. - session.run( - 'py.test', - '-vvv', - '-s', - 'tests/system', - *session.posargs) - - -@nox.session(python="3.7") -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - -@nox.session(python="3.7") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) \ No newline at end of file diff --git a/logging/pylint.config.py b/logging/pylint.config.py deleted file mode 100644 index 5d64b9d2f256..000000000000 --- a/logging/pylint.config.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This module is used to configure gcp-devrel-py-tools run-pylint.""" - -# Library configuration - -# library_additions = {} -# library_replacements = {} - -# Test configuration - -# test_additions = copy.deepcopy(library_additions) -# test_replacements = copy.deepcopy(library_replacements) diff --git a/logging/setup.cfg b/logging/setup.cfg deleted file mode 100644 index 3bd555500e37..000000000000 --- a/logging/setup.cfg +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[bdist_wheel] -universal = 1 diff --git a/logging/setup.py b/logging/setup.py deleted file mode 100644 index fd71ab274b7d..000000000000 --- a/logging/setup.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import io -import os - -import setuptools - - -# Package metadata. - -name = 'google-cloud-logging' -description = 'Stackdriver Logging API client library' -version = '1.14.0' -# Should be one of: -# 'Development Status :: 3 - Alpha' -# 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Production/Stable' -release_status = 'Development Status :: 5 - Production/Stable' -dependencies = [ - "google-api-core[grpc] >= 1.15.0, < 2.0.0dev", - "google-cloud-core >= 1.1.0, < 2.0dev", -] -extras = { -} - - -# Setup boilerplate below this line. 
- -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() - -# Only include packages under the 'google' namespace. Do not include tests, -# benchmarks, etc. -packages = [ - package for package in setuptools.find_packages() - if package.startswith('google')] - -# Determine which namespaces are needed. -namespaces = ['google'] -if 'google.cloud' in packages: - namespaces.append('google.cloud') - - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author='Google LLC', - author_email='googleapis-packages@google.com', - license='Apache 2.0', - url='https://github.com/googleapis/google-cloud-python', - classifiers=[ - release_status, - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Operating System :: OS Independent', - 'Topic :: Internet', - ], - platforms='Posix; MacOS X; Windows', - packages=packages, - namespace_packages=namespaces, - install_requires=dependencies, - extras_require=extras, - python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*', - include_package_data=True, - zip_safe=False, -) diff --git a/logging/synth.metadata b/logging/synth.metadata deleted file mode 100644 index 425ea50eea8e..000000000000 --- a/logging/synth.metadata +++ /dev/null @@ -1,39 +0,0 @@ -{ - "updateTime": "2019-11-19T13:27:19.668508Z", - "sources": [ - { - "generator": { - "name": "artman", - "version": "0.42.1", - "dockerImage": "googleapis/artman@sha256:c773192618c608a7a0415dd95282f841f8e6bcdef7dd760a988c93b77a64bd57" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "d8dd7fe8d5304f7bd1c52207703d7f27d5328c5a", - "internalRef": "281088257" - } - }, - { - "template": { - "name": "python_library", - "origin": "synthtool.gcp", - "version": "2019.10.17" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "logging", - "apiVersion": "v2", - "language": "python", - "generator": "gapic", - "config": "google/logging/artman_logging.yaml" - } - } - ] -} \ No newline at end of file diff --git a/logging/synth.py b/logging/synth.py deleted file mode 100644 index 4364f387b4c9..000000000000 --- a/logging/synth.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
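# A minimal, illustrative sketch (not from the removed setup.py): the package
# discovery in the setup.py above keeps only ``google.*`` packages and then
# registers ``google`` and ``google.cloud`` as namespace packages. Run from the
# old logging/ directory it would behave roughly as follows; the printed names
# are examples only, not an exhaustive list.
import setuptools

packages = [
    package for package in setuptools.find_packages() if package.startswith("google")
]
namespaces = ["google"]
if "google.cloud" in packages:
    namespaces.append("google.cloud")

print(packages)    # e.g. ['google', 'google.cloud', 'google.cloud.logging', ...]
print(namespaces)  # ['google', 'google.cloud']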
- -"""This script is used to synthesize generated parts of this library.""" -import synthtool as s -from synthtool import gcp - -gapic = gcp.GAPICGenerator() -common = gcp.CommonTemplates() - -# ---------------------------------------------------------------------------- -# Generate logging GAPIC layer -# ---------------------------------------------------------------------------- -library = gapic.py_library( - "logging", - "v2", - config_path="/google/logging/artman_logging.yaml", - artman_output_name="logging-v2", - include_protos=True, -) - -# the structure of the logging directory is a bit different, so manually copy the protos -s.move(library / "google/cloud/logging_v2/proto/cloud/logging_v2/proto", "google/cloud/logging_v2/proto") -s.move(library / "google/cloud/logging_v2/proto/*.proto") - -s.move(library / "google/cloud/logging_v2/gapic") -s.move(library / "tests/unit/gapic/v2") -s.move(library / "docs/gapic/v2") - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=95, cov_level=100) -# Don't move noxfile. logging has special testing setups for django, etc -s.move(templated_files, excludes="noxfile.py") - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/logging/tests/__init__.py b/logging/tests/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py b/logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py deleted file mode 100644 index d574de7785f7..000000000000 --- a/logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import time - -import google.auth -from google.api import monitored_resource_pb2 -from google.cloud import logging_v2 -from google.cloud.logging_v2.proto import log_entry_pb2 -from google.cloud.logging_v2.proto import logging_pb2 - - -class TestSystemLoggingServiceV2(object): - def test_write_log_entries(self): - _, project_id = google.auth.default() - - client = logging_v2.LoggingServiceV2Client() - log_name = client.log_path(project_id, "test-{0}".format(time.time())) - resource = {} - labels = {} - entries = [] - response = client.write_log_entries( - entries, log_name=log_name, resource=resource, labels=labels - ) diff --git a/logging/tests/system/test_system.py b/logging/tests/system/test_system.py deleted file mode 100644 index ea51aa8fd729..000000000000 --- a/logging/tests/system/test_system.py +++ /dev/null @@ -1,539 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import logging -import unittest - -from google.api_core.exceptions import BadGateway -from google.api_core.exceptions import Conflict -from google.api_core.exceptions import NotFound -from google.api_core.exceptions import TooManyRequests -from google.api_core.exceptions import ResourceExhausted -from google.api_core.exceptions import RetryError -from google.api_core.exceptions import ServiceUnavailable -from google.cloud._helpers import UTC -import google.cloud.logging -import google.cloud.logging.handlers.handlers -from google.cloud.logging.handlers.handlers import CloudLoggingHandler -from google.cloud.logging.handlers.transports import SyncTransport -from google.cloud.logging import client -from google.cloud.logging.resource import Resource - -from test_utils.retry import RetryErrors -from test_utils.retry import RetryResult -from test_utils.system import unique_resource_id - -_RESOURCE_ID = unique_resource_id("-") -DEFAULT_FILTER = "logName:syslog AND severity>=INFO" -DEFAULT_DESCRIPTION = "System testing" -retry_429 = RetryErrors(TooManyRequests) - - -def _consume_entries(logger): - """Consume all log entries from logger iterator. - - :type logger: :class:`~google.cloud.logging.logger.Logger` - :param logger: A Logger containing entries. - - :rtype: list - :returns: List of all entries consumed. - """ - return list(logger.list_entries()) - - -def _list_entries(logger): - """Retry-ing list entries in a logger. - - Retry until there are actual results and retry on any - failures. - - :type logger: :class:`~google.cloud.logging.logger.Logger` - :param logger: A Logger containing entries. - - :rtype: list - :returns: List of all entries consumed. - """ - inner = RetryResult(_has_entries, max_tries=9)(_consume_entries) - outer = RetryErrors((ServiceUnavailable, ResourceExhausted), max_tries=9)(inner) - return outer(logger) - - -def _has_entries(result): - return len(result) > 0 - - -class Config(object): - """Run-time configuration to be modified at set-up. - - This is a mutable stand-in to allow test set-up to modify - global state. 
- """ - - CLIENT = None - - -def setUpModule(): - Config.CLIENT = client.Client() - - -class TestLogging(unittest.TestCase): - - JSON_PAYLOAD = { - "message": "System test: test_log_struct", - "weather": { - "clouds": "party or partly", - "temperature": 70, - "precipitation": False, - }, - } - TYPE_FILTER = 'protoPayload.@type = "{}"' - - def setUp(self): - self.to_delete = [] - self._handlers_cache = logging.getLogger().handlers[:] - - def tearDown(self): - retry_not_found = RetryErrors((NotFound), max_tries=4) - retry_other = RetryErrors((TooManyRequests, RetryError)) - for doomed in self.to_delete: - try: - retry_not_found(retry_other(doomed.delete))() - except AttributeError: - client, dataset = doomed - retry_not_found(retry_other(client.delete_dataset))(dataset) - except NotFound: - pass - logging.getLogger().handlers = self._handlers_cache[:] - - @staticmethod - def _logger_name(prefix): - return prefix + unique_resource_id("-") - - def test_list_entry_with_unregistered(self): - from google.protobuf import any_pb2 - from google.protobuf import descriptor_pool - from google.cloud.logging import entries - - pool = descriptor_pool.Default() - type_name = "google.cloud.audit.AuditLog" - # Make sure the descriptor is not known in the registry. - with self.assertRaises(KeyError): - pool.FindMessageTypeByName(type_name) - - type_url = "type.googleapis.com/" + type_name - filter_ = self.TYPE_FILTER.format(type_url) - entry_iter = iter(Config.CLIENT.list_entries(page_size=1, filter_=filter_)) - - retry = RetryErrors(TooManyRequests) - protobuf_entry = retry(lambda: next(entry_iter))() - - self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) - if Config.CLIENT._use_grpc: - self.assertIsNone(protobuf_entry.payload_json) - self.assertIsInstance(protobuf_entry.payload_pb, any_pb2.Any) - self.assertEqual(protobuf_entry.payload_pb.type_url, type_url) - else: - self.assertIsNone(protobuf_entry.payload_pb) - self.assertEqual(protobuf_entry.payload_json["@type"], type_url) - - def test_log_text(self): - TEXT_PAYLOAD = "System test: test_log_text" - logger = Config.CLIENT.logger(self._logger_name("log_text")) - self.to_delete.append(logger) - logger.log_text(TEXT_PAYLOAD) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, TEXT_PAYLOAD) - - def test_log_text_with_timestamp(self): - import datetime - - text_payload = "System test: test_log_text_with_timestamp" - logger = Config.CLIENT.logger(self._logger_name("log_text_ts")) - now = datetime.datetime.utcnow() - - self.to_delete.append(logger) - - logger.log_text(text_payload, timestamp=now) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, text_payload) - self.assertEqual(entries[0].timestamp, now.replace(tzinfo=UTC)) - self.assertIsInstance(entries[0].received_timestamp, datetime.datetime) - - def test_log_text_with_resource(self): - text_payload = "System test: test_log_text_with_timestamp" - - logger = Config.CLIENT.logger(self._logger_name("log_text_res")) - now = datetime.datetime.utcnow() - resource = Resource( - type="gae_app", - labels={"module_id": "default", "version_id": "test", "zone": ""}, - ) - - self.to_delete.append(logger) - - logger.log_text(text_payload, timestamp=now, resource=resource) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, text_payload) - # project_id is output only so we don't want it in assertion - del 
entries[0].resource.labels["project_id"] - self.assertEqual(entries[0].resource, resource) - - def test_log_text_w_metadata(self): - TEXT_PAYLOAD = "System test: test_log_text" - INSERT_ID = "INSERTID" - SEVERITY = "INFO" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = 500 - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - logger = Config.CLIENT.logger(self._logger_name("log_text_md")) - self.to_delete.append(logger) - - logger.log_text( - TEXT_PAYLOAD, insert_id=INSERT_ID, severity=SEVERITY, http_request=REQUEST - ) - entries = _list_entries(logger) - - self.assertEqual(len(entries), 1) - - entry = entries[0] - self.assertEqual(entry.payload, TEXT_PAYLOAD) - self.assertEqual(entry.insert_id, INSERT_ID) - self.assertEqual(entry.severity, SEVERITY) - - request = entry.http_request - self.assertEqual(request["requestMethod"], METHOD) - self.assertEqual(request["requestUrl"], URI) - self.assertEqual(request["status"], STATUS) - - def test_log_struct(self): - logger = Config.CLIENT.logger(self._logger_name("log_struct")) - self.to_delete.append(logger) - - logger.log_struct(self.JSON_PAYLOAD) - entries = _list_entries(logger) - - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) - - def test_log_struct_w_metadata(self): - INSERT_ID = "INSERTID" - SEVERITY = "INFO" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = 500 - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - logger = Config.CLIENT.logger(self._logger_name("log_struct_md")) - self.to_delete.append(logger) - - logger.log_struct( - self.JSON_PAYLOAD, - insert_id=INSERT_ID, - severity=SEVERITY, - http_request=REQUEST, - ) - entries = _list_entries(logger) - - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) - self.assertEqual(entries[0].insert_id, INSERT_ID) - self.assertEqual(entries[0].severity, SEVERITY) - request = entries[0].http_request - self.assertEqual(request["requestMethod"], METHOD) - self.assertEqual(request["requestUrl"], URI) - self.assertEqual(request["status"], STATUS) - - def test_log_handler_async(self): - LOG_MESSAGE = "It was the worst of times" - - handler_name = self._logger_name("handler_async") - handler = CloudLoggingHandler(Config.CLIENT, name=handler_name) - # only create the logger to delete, hidden otherwise - logger = Config.CLIENT.logger(handler_name) - self.to_delete.append(logger) - - cloud_logger = logging.getLogger(handler.name) - cloud_logger.addHandler(handler) - cloud_logger.warn(LOG_MESSAGE) - handler.flush() - entries = _list_entries(logger) - expected_payload = {"message": LOG_MESSAGE, "python_logger": handler.name} - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, expected_payload) - - def test_log_handler_sync(self): - LOG_MESSAGE = "It was the best of times." 
- - handler_name = self._logger_name("handler_sync") - handler = CloudLoggingHandler( - Config.CLIENT, name=handler_name, transport=SyncTransport - ) - - # only create the logger to delete, hidden otherwise - logger = Config.CLIENT.logger(handler.name) - self.to_delete.append(logger) - - LOGGER_NAME = "mylogger" - cloud_logger = logging.getLogger(LOGGER_NAME) - cloud_logger.addHandler(handler) - cloud_logger.warn(LOG_MESSAGE) - - entries = _list_entries(logger) - expected_payload = {"message": LOG_MESSAGE, "python_logger": LOGGER_NAME} - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, expected_payload) - - def test_log_root_handler(self): - LOG_MESSAGE = "It was the best of times." - - handler = CloudLoggingHandler( - Config.CLIENT, name=self._logger_name("handler_root") - ) - # only create the logger to delete, hidden otherwise - logger = Config.CLIENT.logger(handler.name) - self.to_delete.append(logger) - - google.cloud.logging.handlers.handlers.setup_logging(handler) - logging.warn(LOG_MESSAGE) - - entries = _list_entries(logger) - expected_payload = {"message": LOG_MESSAGE, "python_logger": "root"} - - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, expected_payload) - - def test_create_metric(self): - METRIC_NAME = "test-create-metric%s" % (_RESOURCE_ID,) - metric = Config.CLIENT.metric(METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) - self.assertFalse(metric.exists()) - retry = RetryErrors(Conflict) - - retry(metric.create)() - - self.to_delete.append(metric) - self.assertTrue(metric.exists()) - - def test_list_metrics(self): - METRIC_NAME = "test-list-metrics%s" % (_RESOURCE_ID,) - metric = Config.CLIENT.metric(METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) - self.assertFalse(metric.exists()) - before_metrics = list(Config.CLIENT.list_metrics()) - before_names = set(before.name for before in before_metrics) - self.assertFalse(metric.name in before_names) - retry = RetryErrors(Conflict) - retry(metric.create)() - self.to_delete.append(metric) - self.assertTrue(metric.exists()) - - after_metrics = list(Config.CLIENT.list_metrics()) - - after_names = set(after.name for after in after_metrics) - self.assertTrue(metric.name in after_names) - - def test_reload_metric(self): - METRIC_NAME = "test-reload-metric%s" % (_RESOURCE_ID,) - retry = RetryErrors(Conflict) - metric = Config.CLIENT.metric(METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) - self.assertFalse(metric.exists()) - retry(metric.create)() - self.to_delete.append(metric) - metric.filter_ = "logName:other" - metric.description = "local changes" - - metric.reload() - - self.assertEqual(metric.filter_, DEFAULT_FILTER) - self.assertEqual(metric.description, DEFAULT_DESCRIPTION) - - def test_update_metric(self): - METRIC_NAME = "test-update-metric%s" % (_RESOURCE_ID,) - retry = RetryErrors(Conflict) - NEW_FILTER = "logName:other" - NEW_DESCRIPTION = "updated" - metric = Config.CLIENT.metric(METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) - self.assertFalse(metric.exists()) - retry(metric.create)() - self.to_delete.append(metric) - metric.filter_ = NEW_FILTER - metric.description = NEW_DESCRIPTION - - metric.update() - - after_metrics = list(Config.CLIENT.list_metrics()) - after_info = {metric.name: metric for metric in after_metrics} - after = after_info[METRIC_NAME] - self.assertEqual(after.filter_, NEW_FILTER) - self.assertEqual(after.description, NEW_DESCRIPTION) - - def _init_storage_bucket(self): - from google.cloud import storage - - BUCKET_NAME = "g-c-python-testing%s" 
% (_RESOURCE_ID,) - BUCKET_URI = "storage.googleapis.com/%s" % (BUCKET_NAME,) - - # Create the destination bucket, and set up the ACL to allow - # Stackdriver Logging to write into it. - retry = RetryErrors((Conflict, TooManyRequests, ServiceUnavailable)) - storage_client = storage.Client() - bucket = storage_client.bucket(BUCKET_NAME) - retry(bucket.create)() - self.to_delete.append(bucket) - bucket.acl.reload() - logs_group = bucket.acl.group("cloud-logs@google.com") - logs_group.grant_owner() - bucket.acl.add_entity(logs_group) - bucket.acl.save() - - return BUCKET_URI - - def test_create_sink_storage_bucket(self): - uri = self._init_storage_bucket() - SINK_NAME = "test-create-sink-bucket%s" % (_RESOURCE_ID,) - - retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) - sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) - self.assertFalse(sink.exists()) - - retry(sink.create)() - - self.to_delete.append(sink) - self.assertTrue(sink.exists()) - - def test_create_sink_pubsub_topic(self): - from google.cloud import pubsub_v1 - - SINK_NAME = "test-create-sink-topic%s" % (_RESOURCE_ID,) - TOPIC_NAME = "logging-systest{}".format(unique_resource_id("-")) - - # Create the destination topic, and set up the IAM policy to allow - # Stackdriver Logging to write into it. - publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(Config.CLIENT.project, TOPIC_NAME) - self.to_delete.append(_DeleteWrapper(publisher, topic_path)) - publisher.create_topic(topic_path) - - policy = publisher.get_iam_policy(topic_path) - policy.bindings.add(role="roles/owner", members=["group:cloud-logs@google.com"]) - publisher.set_iam_policy(topic_path, policy) - - TOPIC_URI = "pubsub.googleapis.com/%s" % (topic_path,) - - retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) - sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, TOPIC_URI) - self.assertFalse(sink.exists()) - - retry(sink.create)() - - self.to_delete.append(sink) - self.assertTrue(sink.exists()) - - def _init_bigquery_dataset(self): - from google.cloud import bigquery - from google.cloud.bigquery.dataset import AccessEntry - - dataset_name = ("system_testing_dataset" + _RESOURCE_ID).replace("-", "_") - dataset_uri = "bigquery.googleapis.com/projects/%s/datasets/%s" % ( - Config.CLIENT.project, - dataset_name, - ) - - # Create the destination dataset, and set up the ACL to allow - # Stackdriver Logging to write into it. 
- retry = RetryErrors((TooManyRequests, BadGateway, ServiceUnavailable)) - bigquery_client = bigquery.Client() - dataset_ref = bigquery_client.dataset(dataset_name) - dataset = retry(bigquery_client.create_dataset)(bigquery.Dataset(dataset_ref)) - self.to_delete.append((bigquery_client, dataset)) - bigquery_client.get_dataset(dataset) - access = AccessEntry("WRITER", "groupByEmail", "cloud-logs@google.com") - dataset.access_entries.append(access) - bigquery_client.update_dataset(dataset, ["access_entries"]) - return dataset_uri - - def test_create_sink_bigquery_dataset(self): - SINK_NAME = "test-create-sink-dataset%s" % (_RESOURCE_ID,) - retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) - uri = self._init_bigquery_dataset() - sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) - self.assertFalse(sink.exists()) - - retry(sink.create)() - - self.to_delete.append(sink) - self.assertTrue(sink.exists()) - - def test_list_sinks(self): - SINK_NAME = "test-list-sinks%s" % (_RESOURCE_ID,) - uri = self._init_storage_bucket() - retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) - sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) - self.assertFalse(sink.exists()) - before_sinks = list(Config.CLIENT.list_sinks()) - before_names = set(before.name for before in before_sinks) - self.assertFalse(sink.name in before_names) - retry(sink.create)() - self.to_delete.append(sink) - self.assertTrue(sink.exists()) - - after_sinks = list(Config.CLIENT.list_sinks()) - - after_names = set(after.name for after in after_sinks) - self.assertTrue(sink.name in after_names) - - def test_reload_sink(self): - SINK_NAME = "test-reload-sink%s" % (_RESOURCE_ID,) - retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) - uri = self._init_bigquery_dataset() - sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) - self.assertFalse(sink.exists()) - retry(sink.create)() - self.to_delete.append(sink) - sink.filter_ = "BOGUS FILTER" - sink.destination = "BOGUS DESTINATION" - - sink.reload() - - self.assertEqual(sink.filter_, DEFAULT_FILTER) - self.assertEqual(sink.destination, uri) - - def test_update_sink(self): - SINK_NAME = "test-update-sink%s" % (_RESOURCE_ID,) - retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) - bucket_uri = self._init_storage_bucket() - dataset_uri = self._init_bigquery_dataset() - UPDATED_FILTER = "logName:syslog" - sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, bucket_uri) - self.assertFalse(sink.exists()) - retry(sink.create)() - self.to_delete.append(sink) - sink.filter_ = UPDATED_FILTER - sink.destination = dataset_uri - - sink.update() - - self.assertEqual(sink.filter_, UPDATED_FILTER) - self.assertEqual(sink.destination, dataset_uri) - - -class _DeleteWrapper(object): - def __init__(self, publisher, topic_path): - self.publisher = publisher - self.topic_path = topic_path - - def delete(self): - self.publisher.delete_topic(self.topic_path) diff --git a/logging/tests/unit/__init__.py b/logging/tests/unit/__init__.py deleted file mode 100644 index df379f1e9d88..000000000000 --- a/logging/tests/unit/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
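# A hedged usage sketch (illustrative; not part of the removed test files): the
# system tests above drive the handwritten google-cloud-logging client. Outside
# a test harness the same calls look roughly like this; it needs real
# credentials (GOOGLE_APPLICATION_CREDENTIALS) and the log name is made up.
import logging

import google.cloud.logging
from google.cloud.logging.handlers import CloudLoggingHandler

client = google.cloud.logging.Client()

# As in test_log_text / test_log_struct: write text and structured entries.
logger = client.logger("example-log")
logger.log_text("a plain text entry")
logger.log_struct({"message": "a structured entry", "weather": {"temperature": 70}})

# As in test_log_handler_sync / test_log_root_handler: route stdlib logging
# records through Cloud Logging via a handler.
handler = CloudLoggingHandler(client, name="example-log")
logging.getLogger().addHandler(handler)
logging.warning("routed through Cloud Logging")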
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py b/logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py deleted file mode 100644 index 5ac89493cd7e..000000000000 --- a/logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py +++ /dev/null @@ -1,516 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import logging_v2 -from google.cloud.logging_v2.proto import logging_config_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestConfigServiceV2Client(object): - def test_list_sinks(self): - # Setup Expected Response - next_page_token = "" - sinks_element = {} - sinks = [sinks_element] - expected_response = {"next_page_token": next_page_token, "sinks": sinks} - expected_response = logging_config_pb2.ListSinksResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_sinks(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.sinks[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.ListSinksRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_sinks_exception(self): - channel = 
ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_sinks(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_sink(self): - # Setup Expected Response - name = "name3373707" - destination = "destination-1429847026" - filter_ = "filter-1274492040" - writer_identity = "writerIdentity775638794" - include_children = True - expected_response = { - "name": name, - "destination": destination, - "filter": filter_, - "writer_identity": writer_identity, - "include_children": include_children, - } - expected_response = logging_config_pb2.LogSink(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - sink_name = client.sink_path("[PROJECT]", "[SINK]") - - response = client.get_sink(sink_name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.GetSinkRequest(sink_name=sink_name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_sink_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - sink_name = client.sink_path("[PROJECT]", "[SINK]") - - with pytest.raises(CustomException): - client.get_sink(sink_name) - - def test_create_sink(self): - # Setup Expected Response - name = "name3373707" - destination = "destination-1429847026" - filter_ = "filter-1274492040" - writer_identity = "writerIdentity775638794" - include_children = True - expected_response = { - "name": name, - "destination": destination, - "filter": filter_, - "writer_identity": writer_identity, - "include_children": include_children, - } - expected_response = logging_config_pb2.LogSink(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - parent = client.project_path("[PROJECT]") - sink = {} - - response = client.create_sink(parent, sink) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.CreateSinkRequest( - parent=parent, sink=sink - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_sink_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - parent = client.project_path("[PROJECT]") - sink = {} - - with pytest.raises(CustomException): - client.create_sink(parent, sink) - - def 
test_update_sink(self): - # Setup Expected Response - name = "name3373707" - destination = "destination-1429847026" - filter_ = "filter-1274492040" - writer_identity = "writerIdentity775638794" - include_children = True - expected_response = { - "name": name, - "destination": destination, - "filter": filter_, - "writer_identity": writer_identity, - "include_children": include_children, - } - expected_response = logging_config_pb2.LogSink(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - sink_name = client.sink_path("[PROJECT]", "[SINK]") - sink = {} - - response = client.update_sink(sink_name, sink) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.UpdateSinkRequest( - sink_name=sink_name, sink=sink - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_sink_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - sink_name = client.sink_path("[PROJECT]", "[SINK]") - sink = {} - - with pytest.raises(CustomException): - client.update_sink(sink_name, sink) - - def test_delete_sink(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - sink_name = client.sink_path("[PROJECT]", "[SINK]") - - client.delete_sink(sink_name) - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_sink_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - sink_name = client.sink_path("[PROJECT]", "[SINK]") - - with pytest.raises(CustomException): - client.delete_sink(sink_name) - - def test_list_exclusions(self): - # Setup Expected Response - next_page_token = "" - exclusions_element = {} - exclusions = [exclusions_element] - expected_response = { - "next_page_token": next_page_token, - "exclusions": exclusions, - } - expected_response = logging_config_pb2.ListExclusionsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_exclusions(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.exclusions[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = 
logging_config_pb2.ListExclusionsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_exclusions_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_exclusions(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_exclusion(self): - # Setup Expected Response - name_2 = "name2-1052831874" - description = "description-1724546052" - filter_ = "filter-1274492040" - disabled = True - expected_response = { - "name": name_2, - "description": description, - "filter": filter_, - "disabled": disabled, - } - expected_response = logging_config_pb2.LogExclusion(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - name = client.exclusion_path("[PROJECT]", "[EXCLUSION]") - - response = client.get_exclusion(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.GetExclusionRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_exclusion_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - name = client.exclusion_path("[PROJECT]", "[EXCLUSION]") - - with pytest.raises(CustomException): - client.get_exclusion(name) - - def test_create_exclusion(self): - # Setup Expected Response - name = "name3373707" - description = "description-1724546052" - filter_ = "filter-1274492040" - disabled = True - expected_response = { - "name": name, - "description": description, - "filter": filter_, - "disabled": disabled, - } - expected_response = logging_config_pb2.LogExclusion(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - parent = client.project_path("[PROJECT]") - exclusion = {} - - response = client.create_exclusion(parent, exclusion) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.CreateExclusionRequest( - parent=parent, exclusion=exclusion - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_exclusion_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - parent = client.project_path("[PROJECT]") - exclusion = {} - - with 
pytest.raises(CustomException): - client.create_exclusion(parent, exclusion) - - def test_update_exclusion(self): - # Setup Expected Response - name_2 = "name2-1052831874" - description = "description-1724546052" - filter_ = "filter-1274492040" - disabled = True - expected_response = { - "name": name_2, - "description": description, - "filter": filter_, - "disabled": disabled, - } - expected_response = logging_config_pb2.LogExclusion(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - name = client.exclusion_path("[PROJECT]", "[EXCLUSION]") - exclusion = {} - update_mask = {} - - response = client.update_exclusion(name, exclusion, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.UpdateExclusionRequest( - name=name, exclusion=exclusion, update_mask=update_mask - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_exclusion_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - name = client.exclusion_path("[PROJECT]", "[EXCLUSION]") - exclusion = {} - update_mask = {} - - with pytest.raises(CustomException): - client.update_exclusion(name, exclusion, update_mask) - - def test_delete_exclusion(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - name = client.exclusion_path("[PROJECT]", "[EXCLUSION]") - - client.delete_exclusion(name) - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.DeleteExclusionRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_exclusion_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - name = client.exclusion_path("[PROJECT]", "[EXCLUSION]") - - with pytest.raises(CustomException): - client.delete_exclusion(name) diff --git a/logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py b/logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py deleted file mode 100644 index 30aa9b807329..000000000000 --- a/logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py +++ /dev/null @@ -1,262 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests.""" - -import mock -import pytest - -from google.api import monitored_resource_pb2 -from google.cloud import logging_v2 -from google.cloud.logging_v2.proto import log_entry_pb2 -from google.cloud.logging_v2.proto import logging_pb2 -from google.protobuf import empty_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestLoggingServiceV2Client(object): - def test_delete_log(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup Request - log_name = client.log_path("[PROJECT]", "[LOG]") - - client.delete_log(log_name) - - assert len(channel.requests) == 1 - expected_request = logging_pb2.DeleteLogRequest(log_name=log_name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_log_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup request - log_name = client.log_path("[PROJECT]", "[LOG]") - - with pytest.raises(CustomException): - client.delete_log(log_name) - - def test_write_log_entries(self): - # Setup Expected Response - expected_response = {} - expected_response = logging_pb2.WriteLogEntriesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup Request - entries = [] - - response = client.write_log_entries(entries) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_pb2.WriteLogEntriesRequest(entries=entries) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_write_log_entries_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup request - entries = [] - - with pytest.raises(CustomException): - client.write_log_entries(entries) - - def test_list_log_entries(self): - # Setup Expected Response - next_page_token = "" - entries_element = {} - entries = [entries_element] - expected_response = {"next_page_token": next_page_token, "entries": entries} - expected_response = logging_pb2.ListLogEntriesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup Request - resource_names = [] - - paged_list_response = client.list_log_entries(resource_names) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.entries[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = logging_pb2.ListLogEntriesRequest( - resource_names=resource_names - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_log_entries_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup request - resource_names = [] - - paged_list_response = client.list_log_entries(resource_names) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_list_monitored_resource_descriptors(self): - # Setup Expected Response - next_page_token = "" - resource_descriptors_element = {} - resource_descriptors = [resource_descriptors_element] - expected_response = { - "next_page_token": next_page_token, - "resource_descriptors": resource_descriptors, - } - expected_response = logging_pb2.ListMonitoredResourceDescriptorsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - paged_list_response = client.list_monitored_resource_descriptors() - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.resource_descriptors[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = logging_pb2.ListMonitoredResourceDescriptorsRequest() - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_monitored_resource_descriptors_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - paged_list_response = client.list_monitored_resource_descriptors() - with pytest.raises(CustomException): - list(paged_list_response) - - def test_list_logs(self): - # Setup Expected Response - next_page_token = "" - log_names_element = "logNamesElement-1079688374" - log_names = [log_names_element] - expected_response = {"next_page_token": next_page_token, "log_names": 
log_names} - expected_response = logging_pb2.ListLogsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_logs(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.log_names[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = logging_pb2.ListLogsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_logs_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_logs(parent) - with pytest.raises(CustomException): - list(paged_list_response) diff --git a/logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py b/logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py deleted file mode 100644 index e9dd3e348d48..000000000000 --- a/logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py +++ /dev/null @@ -1,288 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import logging_v2 -from google.cloud.logging_v2.proto import logging_metrics_pb2 -from google.protobuf import empty_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestMetricsServiceV2Client(object): - def test_list_log_metrics(self): - # Setup Expected Response - next_page_token = "" - metrics_element = {} - metrics = [metrics_element] - expected_response = {"next_page_token": next_page_token, "metrics": metrics} - expected_response = logging_metrics_pb2.ListLogMetricsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_log_metrics(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.metrics[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = logging_metrics_pb2.ListLogMetricsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_log_metrics_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_log_metrics(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_log_metric(self): - # Setup Expected Response - name = "name3373707" - description = "description-1724546052" - filter_ = "filter-1274492040" - value_extractor = "valueExtractor2047672534" - expected_response = { - "name": name, - "description": description, - "filter": filter_, - "value_extractor": value_extractor, - } - expected_response = logging_metrics_pb2.LogMetric(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup Request - metric_name = client.metric_path("[PROJECT]", "[METRIC]") - - response = client.get_log_metric(metric_name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = 
logging_metrics_pb2.GetLogMetricRequest( - metric_name=metric_name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_log_metric_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup request - metric_name = client.metric_path("[PROJECT]", "[METRIC]") - - with pytest.raises(CustomException): - client.get_log_metric(metric_name) - - def test_create_log_metric(self): - # Setup Expected Response - name = "name3373707" - description = "description-1724546052" - filter_ = "filter-1274492040" - value_extractor = "valueExtractor2047672534" - expected_response = { - "name": name, - "description": description, - "filter": filter_, - "value_extractor": value_extractor, - } - expected_response = logging_metrics_pb2.LogMetric(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup Request - parent = client.project_path("[PROJECT]") - metric = {} - - response = client.create_log_metric(parent, metric) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_metrics_pb2.CreateLogMetricRequest( - parent=parent, metric=metric - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_log_metric_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup request - parent = client.project_path("[PROJECT]") - metric = {} - - with pytest.raises(CustomException): - client.create_log_metric(parent, metric) - - def test_update_log_metric(self): - # Setup Expected Response - name = "name3373707" - description = "description-1724546052" - filter_ = "filter-1274492040" - value_extractor = "valueExtractor2047672534" - expected_response = { - "name": name, - "description": description, - "filter": filter_, - "value_extractor": value_extractor, - } - expected_response = logging_metrics_pb2.LogMetric(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup Request - metric_name = client.metric_path("[PROJECT]", "[METRIC]") - metric = {} - - response = client.update_log_metric(metric_name, metric) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_metrics_pb2.UpdateLogMetricRequest( - metric_name=metric_name, metric=metric - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_log_metric_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - 
create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup request - metric_name = client.metric_path("[PROJECT]", "[METRIC]") - metric = {} - - with pytest.raises(CustomException): - client.update_log_metric(metric_name, metric) - - def test_delete_log_metric(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup Request - metric_name = client.metric_path("[PROJECT]", "[METRIC]") - - client.delete_log_metric(metric_name) - - assert len(channel.requests) == 1 - expected_request = logging_metrics_pb2.DeleteLogMetricRequest( - metric_name=metric_name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_log_metric_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup request - metric_name = client.metric_path("[PROJECT]", "[METRIC]") - - with pytest.raises(CustomException): - client.delete_log_metric(metric_name) diff --git a/logging/tests/unit/handlers/__init__.py b/logging/tests/unit/handlers/__init__.py deleted file mode 100644 index df379f1e9d88..000000000000 --- a/logging/tests/unit/handlers/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/logging/tests/unit/handlers/middleware/test_request.py b/logging/tests/unit/handlers/middleware/test_request.py deleted file mode 100644 index f606da573cec..000000000000 --- a/logging/tests/unit/handlers/middleware/test_request.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright 2017 Google LLC All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import mock - - -class DjangoBase(unittest.TestCase): - @classmethod - def setUpClass(cls): - from django.conf import settings - from django.test.utils import setup_test_environment - - if not settings.configured: - settings.configure() - setup_test_environment() - - @classmethod - def tearDownClass(cls): - from django.test.utils import teardown_test_environment - - teardown_test_environment() - - -class TestRequestMiddleware(DjangoBase): - def _get_target_class(self): - from google.cloud.logging.handlers.middleware import request - - return request.RequestMiddleware - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_process_request(self): - from django.test import RequestFactory - from google.cloud.logging.handlers.middleware import request - - middleware = self._make_one() - mock_request = RequestFactory().get("/") - middleware.process_request(mock_request) - - django_request = request._get_django_request() - self.assertEqual(django_request, mock_request) - - -class Test__get_django_request(DjangoBase): - @staticmethod - def _call_fut(): - from google.cloud.logging.handlers.middleware import request - - return request._get_django_request() - - @staticmethod - def _make_patch(new_locals): - return mock.patch( - "google.cloud.logging.handlers.middleware.request._thread_locals", - new=new_locals, - ) - - def test_with_request(self): - thread_locals = mock.Mock(spec=["request"]) - with self._make_patch(thread_locals): - django_request = self._call_fut() - - self.assertIs(django_request, thread_locals.request) - - def test_without_request(self): - thread_locals = mock.Mock(spec=[]) - with self._make_patch(thread_locals): - django_request = self._call_fut() - - self.assertIsNone(django_request) diff --git a/logging/tests/unit/handlers/test__helpers.py b/logging/tests/unit/handlers/test__helpers.py deleted file mode 100644 index 702015961771..000000000000 --- a/logging/tests/unit/handlers/test__helpers.py +++ /dev/null @@ -1,219 +0,0 @@ -# Copyright 2017 Google LLC All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import unittest - -import mock -import six - -try: - from webapp2 import RequestHandler -except SyntaxError: - # webapp2 has not been ported to python3, so it will give a syntax - # error if we try. We'll just skip the webapp2 tests in that case. 
- RequestHandler = object - - -class Test_get_trace_id_from_flask(unittest.TestCase): - @staticmethod - def _call_fut(): - from google.cloud.logging.handlers import _helpers - - return _helpers.get_trace_id_from_flask() - - @staticmethod - def create_app(): - import flask - - app = flask.Flask(__name__) - - @app.route("/") - def index(): - return "test flask trace" # pragma: NO COVER - - return app - - def test_no_context_header(self): - app = self.create_app() - with app.test_request_context(path="/", headers={}): - trace_id = self._call_fut() - - self.assertIsNone(trace_id) - - def test_valid_context_header(self): - flask_trace_header = "X_CLOUD_TRACE_CONTEXT" - expected_trace_id = "testtraceidflask" - flask_trace_id = expected_trace_id + "/testspanid" - - app = self.create_app() - context = app.test_request_context( - path="/", headers={flask_trace_header: flask_trace_id} - ) - - with context: - trace_id = self._call_fut() - - self.assertEqual(trace_id, expected_trace_id) - - -class _GetTraceId(RequestHandler): - def get(self): - from google.cloud.logging.handlers import _helpers - - trace_id = _helpers.get_trace_id_from_webapp2() - self.response.content_type = "application/json" - self.response.out.write(json.dumps(trace_id)) - - -@unittest.skipIf(not six.PY2, "webapp2 is Python 2 only") -class Test_get_trace_id_from_webapp2(unittest.TestCase): - @staticmethod - def create_app(): - import webapp2 - - app = webapp2.WSGIApplication([("/", _GetTraceId)]) - - return app - - def test_no_context_header(self): - import webob - - req = webob.BaseRequest.blank("/") - response = req.get_response(self.create_app()) - trace_id = json.loads(response.body) - - self.assertEqual(None, trace_id) - - def test_valid_context_header(self): - import webob - - webapp2_trace_header = "X-Cloud-Trace-Context" - expected_trace_id = "testtraceidwebapp2" - webapp2_trace_id = expected_trace_id + "/testspanid" - - req = webob.BaseRequest.blank( - "/", headers={webapp2_trace_header: webapp2_trace_id} - ) - response = req.get_response(self.create_app()) - trace_id = json.loads(response.body) - - self.assertEqual(trace_id, expected_trace_id) - - -class Test_get_trace_id_from_django(unittest.TestCase): - @staticmethod - def _call_fut(): - from google.cloud.logging.handlers import _helpers - - return _helpers.get_trace_id_from_django() - - def setUp(self): - from django.conf import settings - from django.test.utils import setup_test_environment - - if not settings.configured: - settings.configure() - setup_test_environment() - - def tearDown(self): - from django.test.utils import teardown_test_environment - from google.cloud.logging.handlers.middleware import request - - teardown_test_environment() - request._thread_locals.__dict__.clear() - - def test_no_context_header(self): - from django.test import RequestFactory - from google.cloud.logging.handlers.middleware import request - - django_request = RequestFactory().get("/") - - middleware = request.RequestMiddleware() - middleware.process_request(django_request) - trace_id = self._call_fut() - self.assertIsNone(trace_id) - - def test_valid_context_header(self): - from django.test import RequestFactory - from google.cloud.logging.handlers.middleware import request - - django_trace_header = "HTTP_X_CLOUD_TRACE_CONTEXT" - expected_trace_id = "testtraceiddjango" - django_trace_id = expected_trace_id + "/testspanid" - - django_request = RequestFactory().get( - "/", **{django_trace_header: django_trace_id} - ) - - middleware = request.RequestMiddleware() - 
middleware.process_request(django_request) - trace_id = self._call_fut() - - self.assertEqual(trace_id, expected_trace_id) - - -class Test_get_trace_id(unittest.TestCase): - @staticmethod - def _call_fut(): - from google.cloud.logging.handlers import _helpers - - return _helpers.get_trace_id() - - def _helper(self, django_return, flask_return): - django_patch = mock.patch( - "google.cloud.logging.handlers._helpers.get_trace_id_from_django", - return_value=django_return, - ) - flask_patch = mock.patch( - "google.cloud.logging.handlers._helpers.get_trace_id_from_flask", - return_value=flask_return, - ) - - with django_patch as django_mock: - with flask_patch as flask_mock: - trace_id = self._call_fut() - - return django_mock, flask_mock, trace_id - - def test_from_django(self): - django_mock, flask_mock, trace_id = self._helper("test-django-trace-id", None) - self.assertEqual(trace_id, django_mock.return_value) - - django_mock.assert_called_once_with() - flask_mock.assert_not_called() - - def test_from_flask(self): - django_mock, flask_mock, trace_id = self._helper(None, "test-flask-trace-id") - self.assertEqual(trace_id, flask_mock.return_value) - - django_mock.assert_called_once_with() - flask_mock.assert_called_once_with() - - def test_from_django_and_flask(self): - django_mock, flask_mock, trace_id = self._helper( - "test-django-trace-id", "test-flask-trace-id" - ) - # Django wins. - self.assertEqual(trace_id, django_mock.return_value) - - django_mock.assert_called_once_with() - flask_mock.assert_not_called() - - def test_missing(self): - django_mock, flask_mock, trace_id = self._helper(None, None) - self.assertIsNone(trace_id) - - django_mock.assert_called_once_with() - flask_mock.assert_called_once_with() diff --git a/logging/tests/unit/handlers/test_app_engine.py b/logging/tests/unit/handlers/test_app_engine.py deleted file mode 100644 index eef4ac7410e3..000000000000 --- a/logging/tests/unit/handlers/test_app_engine.py +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright 2016 Google LLC All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import unittest - -import mock - - -class TestAppEngineHandler(unittest.TestCase): - PROJECT = "PROJECT" - - def _get_target_class(self): - from google.cloud.logging.handlers.app_engine import AppEngineHandler - - return AppEngineHandler - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_constructor_w_gae_standard_env(self): - import sys - from google.cloud.logging.handlers import app_engine - - client = mock.Mock(project=self.PROJECT, spec=["project"]) - - # Verify that project/service/version are picked up from the - # environment. 
- with mock.patch( - "os.environ", - new={ - app_engine._GAE_PROJECT_ENV_STANDARD: "test_project", - app_engine._GAE_SERVICE_ENV: "test_service", - app_engine._GAE_VERSION_ENV: "test_version", - }, - ): - handler = self._make_one(client, transport=_Transport) - - self.assertIs(handler.client, client) - self.assertEqual(handler.name, app_engine._DEFAULT_GAE_LOGGER_NAME) - self.assertEqual(handler.resource.type, "gae_app") - self.assertEqual(handler.resource.labels["project_id"], "test_project") - self.assertEqual(handler.resource.labels["module_id"], "test_service") - self.assertEqual(handler.resource.labels["version_id"], "test_version") - self.assertIs(handler.stream, sys.stderr) - - def test_constructor_w_gae_flex_env(self): - import io - from google.cloud.logging.handlers import app_engine - - client = mock.Mock(project=self.PROJECT, spec=["project"]) - name = "test-logger" - stream = io.BytesIO() - - # Verify that _GAE_PROJECT_ENV_FLEX environment variable takes - # precedence over _GAE_PROJECT_ENV_STANDARD. - with mock.patch( - "os.environ", - new={ - app_engine._GAE_PROJECT_ENV_FLEX: "test_project_2", - app_engine._GAE_PROJECT_ENV_STANDARD: "test_project_should_be_overridden", - app_engine._GAE_SERVICE_ENV: "test_service_2", - app_engine._GAE_VERSION_ENV: "test_version_2", - }, - ): - handler = self._make_one( - client, name=name, transport=_Transport, stream=stream - ) - - self.assertIs(handler.client, client) - self.assertEqual(handler.name, name) - self.assertEqual(handler.resource.type, "gae_app") - self.assertEqual(handler.resource.labels["project_id"], "test_project_2") - self.assertEqual(handler.resource.labels["module_id"], "test_service_2") - self.assertEqual(handler.resource.labels["version_id"], "test_version_2") - self.assertIs(handler.stream, stream) - - def test_emit(self): - client = mock.Mock(project=self.PROJECT, spec=["project"]) - handler = self._make_one(client, transport=_Transport) - gae_resource = handler.get_gae_resource() - gae_labels = handler.get_gae_labels() - trace = None - logname = "app" - message = "hello world" - record = logging.LogRecord(logname, logging, None, None, message, None, None) - handler.emit(record) - - self.assertIs(handler.transport.client, client) - self.assertEqual(handler.transport.name, logname) - self.assertEqual( - handler.transport.send_called_with, - (record, message, gae_resource, gae_labels, trace), - ) - - def _get_gae_labels_helper(self, trace_id): - get_trace_patch = mock.patch( - "google.cloud.logging.handlers.app_engine.get_trace_id", - return_value=trace_id, - ) - - client = mock.Mock(project=self.PROJECT, spec=["project"]) - # The handler actually calls ``get_gae_labels()``. 
- with get_trace_patch as mock_get_trace: - handler = self._make_one(client, transport=_Transport) - - gae_labels = handler.get_gae_labels() - self.assertEqual(mock_get_trace.mock_calls, [mock.call()]) - - return gae_labels - - def test_get_gae_labels_with_label(self): - from google.cloud.logging.handlers import app_engine - - trace_id = "test-gae-trace-id" - gae_labels = self._get_gae_labels_helper(trace_id) - expected_labels = {app_engine._TRACE_ID_LABEL: trace_id} - self.assertEqual(gae_labels, expected_labels) - - def test_get_gae_labels_without_label(self): - gae_labels = self._get_gae_labels_helper(None) - self.assertEqual(gae_labels, {}) - - -class _Transport(object): - def __init__(self, client, name): - self.client = client - self.name = name - - def send(self, record, message, resource, labels, trace): - self.send_called_with = (record, message, resource, labels, trace) diff --git a/logging/tests/unit/handlers/test_container_engine.py b/logging/tests/unit/handlers/test_container_engine.py deleted file mode 100644 index 09ee329ba3f2..000000000000 --- a/logging/tests/unit/handlers/test_container_engine.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2016 Google LLC All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class TestContainerEngineHandler(unittest.TestCase): - PROJECT = "PROJECT" - - def _get_target_class(self): - from google.cloud.logging.handlers.container_engine import ( - ContainerEngineHandler, - ) - - return ContainerEngineHandler - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - handler = self._make_one() - self.assertIsNone(handler.name) - - def test_ctor_w_name(self): - handler = self._make_one(name="foo") - self.assertEqual(handler.name, "foo") - - def test_format(self): - import logging - import json - - handler = self._make_one() - logname = "loggername" - message = "hello world" - record = logging.LogRecord( - logname, logging.INFO, None, None, message, None, None - ) - record.created = 5.03 - expected_payload = { - "message": message, - "timestamp": {"seconds": 5, "nanos": int(0.03 * 1e9)}, - "thread": record.thread, - "severity": record.levelname, - } - payload = handler.format(record) - - self.assertEqual(payload, json.dumps(expected_payload)) diff --git a/logging/tests/unit/handlers/test_handlers.py b/logging/tests/unit/handlers/test_handlers.py deleted file mode 100644 index 5559791bc2fa..000000000000 --- a/logging/tests/unit/handlers/test_handlers.py +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import unittest - - -class TestCloudLoggingHandler(unittest.TestCase): - - PROJECT = "PROJECT" - - @staticmethod - def _get_target_class(): - from google.cloud.logging.handlers.handlers import CloudLoggingHandler - - return CloudLoggingHandler - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - import sys - from google.cloud.logging.logger import _GLOBAL_RESOURCE - from google.cloud.logging.handlers.handlers import DEFAULT_LOGGER_NAME - - client = _Client(self.PROJECT) - handler = self._make_one(client, transport=_Transport) - self.assertEqual(handler.name, DEFAULT_LOGGER_NAME) - self.assertIs(handler.client, client) - self.assertIsInstance(handler.transport, _Transport) - self.assertIs(handler.transport.client, client) - self.assertEqual(handler.transport.name, DEFAULT_LOGGER_NAME) - self.assertIs(handler.resource, _GLOBAL_RESOURCE) - self.assertIsNone(handler.labels) - self.assertIs(handler.stream, sys.stderr) - - def test_ctor_explicit(self): - import io - from google.cloud.logging.resource import Resource - - resource = Resource("resource_type", {"resource_label": "value"}) - labels = {"handler_lable": "value"} - name = "test-logger" - client = _Client(self.PROJECT) - stream = io.BytesIO() - handler = self._make_one( - client, - name=name, - transport=_Transport, - resource=resource, - labels=labels, - stream=stream, - ) - self.assertEqual(handler.name, name) - self.assertIs(handler.client, client) - self.assertIsInstance(handler.transport, _Transport) - self.assertIs(handler.transport.client, client) - self.assertEqual(handler.transport.name, name) - self.assertIs(handler.resource, resource) - self.assertEqual(handler.labels, labels) - self.assertIs(handler.stream, stream) - - def test_emit(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE - - client = _Client(self.PROJECT) - handler = self._make_one( - client, transport=_Transport, resource=_GLOBAL_RESOURCE - ) - logname = "loggername" - message = "hello world" - record = logging.LogRecord(logname, logging, None, None, message, None, None) - handler.emit(record) - - self.assertEqual( - handler.transport.send_called_with, - (record, message, _GLOBAL_RESOURCE, None), - ) - - -class TestSetupLogging(unittest.TestCase): - def _call_fut(self, handler, excludes=None): - from google.cloud.logging.handlers.handlers import setup_logging - - if excludes: - return setup_logging(handler, excluded_loggers=excludes) - else: - return setup_logging(handler) - - def test_setup_logging(self): - handler = _Handler(logging.INFO) - self._call_fut(handler) - - root_handlers = logging.getLogger().handlers - self.assertIn(handler, root_handlers) - - def test_setup_logging_excludes(self): - INCLUDED_LOGGER_NAME = "includeme" - EXCLUDED_LOGGER_NAME = "excludeme" - - handler = _Handler(logging.INFO) - self._call_fut(handler, (EXCLUDED_LOGGER_NAME,)) - - included_logger = logging.getLogger(INCLUDED_LOGGER_NAME) - self.assertTrue(included_logger.propagate) - - excluded_logger = logging.getLogger(EXCLUDED_LOGGER_NAME) - self.assertNotIn(handler, 
excluded_logger.handlers) - self.assertFalse(excluded_logger.propagate) - - def setUp(self): - self._handlers_cache = logging.getLogger().handlers[:] - - def tearDown(self): - # cleanup handlers - logging.getLogger().handlers = self._handlers_cache[:] - - -class _Handler(object): - def __init__(self, level): - self.level = level - - def acquire(self): - pass # pragma: NO COVER - - def release(self): - pass # pragma: NO COVER - - -class _Client(object): - def __init__(self, project): - self.project = project - - -class _Transport(object): - def __init__(self, client, name): - self.client = client - self.name = name - - def send(self, record, message, resource, labels=None): - self.send_called_with = (record, message, resource, labels) diff --git a/logging/tests/unit/handlers/transports/__init__.py b/logging/tests/unit/handlers/transports/__init__.py deleted file mode 100644 index df379f1e9d88..000000000000 --- a/logging/tests/unit/handlers/transports/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/logging/tests/unit/handlers/transports/test_background_thread.py b/logging/tests/unit/handlers/transports/test_background_thread.py deleted file mode 100644 index 7edae8a7bfa0..000000000000 --- a/logging/tests/unit/handlers/transports/test_background_thread.py +++ /dev/null @@ -1,536 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time -import logging -import unittest - -import mock -from six.moves import queue - - -class TestBackgroundThreadHandler(unittest.TestCase): - PROJECT = "PROJECT" - - @staticmethod - def _get_target_class(): - from google.cloud.logging.handlers.transports import BackgroundThreadTransport - - return BackgroundThreadTransport - - def _make_one(self, *args, **kw): - worker_patch = mock.patch( - "google.cloud.logging.handlers.transports." "background_thread._Worker", - autospec=True, - ) - with worker_patch as worker_mock: - return self._get_target_class()(*args, **kw), worker_mock - - def test_constructor(self): - client = _Client(self.PROJECT) - name = "python_logger" - - transport, worker = self._make_one(client, name) - - (logger,) = worker.call_args[0] # call_args[0] is *args. 
- self.assertEqual(logger.name, name) - - def test_send(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE - - client = _Client(self.PROJECT) - name = "python_logger" - - transport, _ = self._make_one(client, name) - - python_logger_name = "mylogger" - message = "hello world" - - record = logging.LogRecord( - python_logger_name, logging.INFO, None, None, message, None, None - ) - - transport.send(record, message, _GLOBAL_RESOURCE) - - transport.worker.enqueue.assert_called_once_with( - record, message, _GLOBAL_RESOURCE, None, trace=None, span_id=None - ) - - def test_trace_send(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE - - client = _Client(self.PROJECT) - name = "python_logger" - - transport, _ = self._make_one(client, name) - - python_logger_name = "mylogger" - message = "hello world" - trace = "the-project/trace/longlogTraceid" - - record = logging.LogRecord( - python_logger_name, logging.INFO, None, None, message, None, None - ) - - transport.send(record, message, _GLOBAL_RESOURCE, trace=trace) - - transport.worker.enqueue.assert_called_once_with( - record, message, _GLOBAL_RESOURCE, None, trace=trace, span_id=None - ) - - def test_span_send(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE - - client = _Client(self.PROJECT) - name = "python_logger" - - transport, _ = self._make_one(client, name) - - python_logger_name = "mylogger" - message = "hello world" - span_id = "the-project/trace/longlogTraceid/span/123456789012abbacdac" - - record = logging.LogRecord( - python_logger_name, logging.INFO, None, None, message, None, None - ) - - transport.send(record, message, _GLOBAL_RESOURCE, span_id=span_id) - - transport.worker.enqueue.assert_called_once_with( - record, message, _GLOBAL_RESOURCE, None, trace=None, span_id=span_id - ) - - def test_flush(self): - client = _Client(self.PROJECT) - name = "python_logger" - - transport, _ = self._make_one(client, name) - - transport.flush() - - transport.worker.flush.assert_called() - - def test_worker(self): - client = _Client(self.PROJECT) - name = "python_logger" - batch_size = 30 - grace_period = 20.0 - max_latency = 0.1 - transport, worker = self._make_one( - client, - name, - grace_period=grace_period, - batch_size=batch_size, - max_latency=max_latency, - ) - worker_grace_period = worker.call_args[1]["grace_period"] # **kwargs. 
- worker_batch_size = worker.call_args[1]["max_batch_size"] - worker_max_latency = worker.call_args[1]["max_latency"] - self.assertEqual(worker_grace_period, grace_period) - self.assertEqual(worker_batch_size, batch_size) - self.assertEqual(worker_max_latency, max_latency) - - -class Test_Worker(unittest.TestCase): - NAME = "python_logger" - - @staticmethod - def _get_target_class(): - from google.cloud.logging.handlers.transports import background_thread - - return background_thread._Worker - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def _start_with_thread_patch(self, worker): - with mock.patch("threading.Thread", new=_Thread) as thread_mock: - with mock.patch("atexit.register") as atexit_mock: - worker.start() - return thread_mock, atexit_mock - - def test_constructor(self): - logger = _Logger(self.NAME) - grace_period = 50 - max_batch_size = 50 - max_latency = 0.1 - - worker = self._make_one( - logger, - grace_period=grace_period, - max_batch_size=max_batch_size, - max_latency=max_latency, - ) - - self.assertEqual(worker._cloud_logger, logger) - self.assertEqual(worker._grace_period, grace_period) - self.assertEqual(worker._max_batch_size, max_batch_size) - self.assertEqual(worker._max_latency, max_latency) - self.assertFalse(worker.is_alive) - self.assertIsNone(worker._thread) - - def test_start(self): - from google.cloud.logging.handlers.transports import background_thread - - worker = self._make_one(_Logger(self.NAME)) - - _, atexit_mock = self._start_with_thread_patch(worker) - - self.assertTrue(worker.is_alive) - self.assertIsNotNone(worker._thread) - self.assertTrue(worker._thread.daemon) - self.assertEqual(worker._thread._target, worker._thread_main) - self.assertEqual(worker._thread._name, background_thread._WORKER_THREAD_NAME) - atexit_mock.assert_called_once_with(worker._main_thread_terminated) - - # Calling start again should not start a new thread. 
- current_thread = worker._thread - self._start_with_thread_patch(worker) - self.assertIs(current_thread, worker._thread) - - def test_stop(self): - from google.cloud.logging.handlers.transports import background_thread - - grace_period = 5.0 - worker = self._make_one(_Logger(self.NAME)) - - self._start_with_thread_patch(worker) - thread = worker._thread - - worker.stop(grace_period) - - self.assertEqual(worker._queue.qsize(), 1) - self.assertEqual(worker._queue.get(), background_thread._WORKER_TERMINATOR) - self.assertFalse(worker.is_alive) - self.assertIsNone(worker._thread) - self.assertEqual(thread._timeout, grace_period) - - # Stopping twice should not be an error - worker.stop() - - def test_stop_no_grace(self): - worker = self._make_one(_Logger(self.NAME)) - - self._start_with_thread_patch(worker) - thread = worker._thread - - worker.stop() - - self.assertEqual(thread._timeout, None) - - def test__main_thread_terminated(self): - worker = self._make_one(_Logger(self.NAME)) - - self._start_with_thread_patch(worker) - worker._main_thread_terminated() - - self.assertFalse(worker.is_alive) - - # Calling twice should not be an error - worker._main_thread_terminated() - - def test__main_thread_terminated_non_empty_queue(self): - worker = self._make_one(_Logger(self.NAME)) - - self._start_with_thread_patch(worker) - record = mock.Mock() - record.created = time.time() - worker.enqueue(record, "") - worker._main_thread_terminated() - - self.assertFalse(worker.is_alive) - - def test__main_thread_terminated_did_not_join(self): - worker = self._make_one(_Logger(self.NAME)) - - self._start_with_thread_patch(worker) - worker._thread._terminate_on_join = False - record = mock.Mock() - record.created = time.time() - worker.enqueue(record, "") - worker._main_thread_terminated() - - self.assertFalse(worker.is_alive) - - @staticmethod - def _enqueue_record(worker, message, levelno=logging.INFO, **kw): - record = logging.LogRecord("testing", levelno, None, None, message, None, None) - worker.enqueue(record, message, **kw) - - def test_enqueue_defaults(self): - import datetime - from google.cloud.logging._helpers import LogSeverity - - worker = self._make_one(_Logger(self.NAME)) - self.assertTrue(worker._queue.empty()) - message = "TEST SEVERITY" - - self._enqueue_record(worker, message) - - entry = worker._queue.get_nowait() - expected_info = {"message": message, "python_logger": "testing"} - self.assertEqual(entry["info"], expected_info) - self.assertEqual(entry["severity"], LogSeverity.INFO) - self.assertIsNone(entry["resource"]) - self.assertIsNone(entry["labels"]) - self.assertIsNone(entry["trace"]) - self.assertIsNone(entry["span_id"]) - self.assertIsInstance(entry["timestamp"], datetime.datetime) - - def test_enqueue_explicit(self): - import datetime - from google.cloud.logging._helpers import LogSeverity - - worker = self._make_one(_Logger(self.NAME)) - self.assertTrue(worker._queue.empty()) - message = "TEST SEVERITY" - resource = object() - labels = {"foo": "bar"} - trace = "TRACE" - span_id = "SPAN_ID" - - self._enqueue_record( - worker, - message, - levelno=logging.ERROR, - resource=resource, - labels=labels, - trace=trace, - span_id=span_id, - ) - - entry = worker._queue.get_nowait() - - expected_info = {"message": message, "python_logger": "testing"} - self.assertEqual(entry["info"], expected_info) - self.assertEqual(entry["severity"], LogSeverity.ERROR) - self.assertIs(entry["resource"], resource) - self.assertIs(entry["labels"], labels) - self.assertIs(entry["trace"], trace) - 
self.assertIs(entry["span_id"], span_id) - self.assertIsInstance(entry["timestamp"], datetime.datetime) - - def test__thread_main(self): - from google.cloud.logging.handlers.transports import background_thread - - worker = self._make_one(_Logger(self.NAME)) - - # Enqueue two records and the termination signal. - self._enqueue_record(worker, "1") - self._enqueue_record(worker, "2") - worker._queue.put_nowait(background_thread._WORKER_TERMINATOR) - - worker._thread_main() - - self.assertTrue(worker._cloud_logger._batch.commit_called) - self.assertEqual(worker._cloud_logger._batch.commit_count, 2) - self.assertEqual(worker._queue.qsize(), 0) - - def test__thread_main_error(self): - from google.cloud.logging.handlers.transports import background_thread - - worker = self._make_one(_Logger(self.NAME)) - worker._cloud_logger._batch_cls = _RaisingBatch - - # Enqueue one record and the termination signal. - self._enqueue_record(worker, "1") - worker._queue.put_nowait(background_thread._WORKER_TERMINATOR) - - worker._thread_main() - - self.assertTrue(worker._cloud_logger._batch.commit_called) - self.assertEqual(worker._queue.qsize(), 0) - - def test__thread_main_batches(self): - from google.cloud.logging.handlers.transports import background_thread - - worker = self._make_one(_Logger(self.NAME), max_batch_size=2) - - # Enqueue three records and the termination signal. This should be - # enough to perform two separate batches and a third loop with just - # the exit. - self._enqueue_record(worker, "1") - self._enqueue_record(worker, "2") - self._enqueue_record(worker, "3") - self._enqueue_record(worker, "4") - worker._queue.put_nowait(background_thread._WORKER_TERMINATOR) - - worker._thread_main() - - # The last batch should not have been executed because it had no items. - self.assertFalse(worker._cloud_logger._batch.commit_called) - self.assertEqual(worker._queue.qsize(), 0) - - @mock.patch("time.time", autospec=True, return_value=1) - def test__thread_main_max_latency(self, time): - # Note: this test is a bit brittle as it assumes the operation of - # _get_many invokes queue.get() followed by queue._get(). It fails - # the "change detector" test in that way. However, this is still a - # useful test to verify the queue timeout is appropriately calculated. - from six.moves import queue - from google.cloud.logging.handlers.transports import background_thread - - # Use monotonically increasing time. - time.side_effect = range(1, 6) - - worker = self._make_one(_Logger(self.NAME), max_latency=2, max_batch_size=10) - worker._queue = mock.create_autospec(queue.Queue, instance=True) - - worker._queue.get.side_effect = [ - {"info": {"message": "1"}}, # Single record. - queue.Empty(), # Emulate a queue.get() timeout. - {"info": {"message": "1"}}, # Second record. - background_thread._WORKER_TERMINATOR, # Stop the thread. - queue.Empty(), # Emulate a queue.get() timeout. - ] - - worker._thread_main() - - self.assertEqual(worker._cloud_logger._num_batches, 2) - self.assertTrue(worker._cloud_logger._batch.commit_called) - self.assertEqual(worker._cloud_logger._batch.commit_count, 1) - - # Time should have been called five times. - # - # For the first batch, it should have been called: - # * Once to get the start time. (1) - # * Once to get the elapsed time while grabbing the second item. - # (2) - # - # For the second batch, it should have been called: - # * Once to get start time. (3) - # * Once to get the elapsed time while grabbing the second item. 
- # (3) - # * Once to get the elapsed time while grabbing the final - # item. (4) - # * Once final time to get the elapsed time while receiving - # the empty queue. - # - self.assertEqual(time.call_count, 5) - - # Queue.get should've been called 5 times as well, but with different - # timeouts due to the monotonically increasing time. - # - # For the first batch, it will be called once without a timeout - # (for the first item) and then with timeout=1, as start will be - # 1 and now will be 2. - # - # For the second batch, it will be called once without a timeout - # (for the first item) and then with timeout=1, as start will be - # 3 and now will be 4, and finally with timeout=0 as start will be 3 - # and now will be 5. - # - worker._queue.get.assert_has_calls( - [ - mock.call(), - mock.call(timeout=1), - mock.call(), - mock.call(timeout=1), - mock.call(timeout=0), - ] - ) - - def test_flush(self): - worker = self._make_one(_Logger(self.NAME)) - worker._queue = mock.Mock(spec=queue.Queue) - - # Queue is empty, should not block. - worker.flush() - worker._queue.join.assert_called() - - -class _Thread(object): - def __init__(self, target, name): - self._target = target - self._name = name - self._timeout = None - self._terminate_on_join = True - self.daemon = False - - def is_alive(self): - return self._is_alive - - def start(self): - self._is_alive = True - - def stop(self): - self._is_alive = False - - def join(self, timeout=None): - self._timeout = timeout - if self._terminate_on_join: - self.stop() - - -class _Batch(object): - def __init__(self): - self.entries = [] - self.commit_called = False - self.commit_count = None - - def log_struct( - self, - info, - severity=logging.INFO, - resource=None, - labels=None, - trace=None, - span_id=None, - timestamp=None, - ): - from google.cloud.logging.logger import _GLOBAL_RESOURCE - - assert resource is None - resource = _GLOBAL_RESOURCE - - self.log_struct_called_with = (info, severity, resource, labels, trace, span_id) - self.entries.append(info) - - def commit(self): - self.commit_called = True - self.commit_count = len(self.entries) - del self.entries[:] - - -class _RaisingBatch(_Batch): - def commit(self): - self.commit_called = True - raise ValueError("This batch raises on commit.") - - -class _Logger(object): - def __init__(self, name): - self.name = name - self._batch_cls = _Batch - self._batch = None - self._num_batches = 0 - - def batch(self): - self._batch = self._batch_cls() - self._num_batches += 1 - return self._batch - - -class _Client(object): - def __init__(self, project, _http=None, credentials=None): - import mock - - self.project = project - self._http = _http - self._credentials = credentials - self._connection = mock.Mock(credentials=credentials, spec=["credentials"]) - - def logger(self, name): # pylint: disable=unused-argument - self._logger = _Logger(name) - return self._logger diff --git a/logging/tests/unit/handlers/transports/test_base.py b/logging/tests/unit/handlers/transports/test_base.py deleted file mode 100644 index 03612e115a98..000000000000 --- a/logging/tests/unit/handlers/transports/test_base.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class TestBaseHandler(unittest.TestCase): - - PROJECT = "PROJECT" - - @staticmethod - def _get_target_class(): - from google.cloud.logging.handlers.transports import Transport - - return Transport - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_send_is_abstract(self): - target = self._make_one() - with self.assertRaises(NotImplementedError): - target.send(None, None, None) - - def test_flush_is_abstract_and_optional(self): - target = self._make_one() - target.flush() diff --git a/logging/tests/unit/handlers/transports/test_sync.py b/logging/tests/unit/handlers/transports/test_sync.py deleted file mode 100644 index f2ff67d59d82..000000000000 --- a/logging/tests/unit/handlers/transports/test_sync.py +++ /dev/null @@ -1,107 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import logging -import unittest - - -class TestSyncHandler(unittest.TestCase): - - PROJECT = "PROJECT" - - @staticmethod - def _get_target_class(): - from google.cloud.logging.handlers.transports import SyncTransport - - return SyncTransport - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - client = _Client(self.PROJECT) - NAME = "python_logger" - transport = self._make_one(client, NAME) - self.assertEqual(transport.logger.name, "python_logger") - - def test_send(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE - from google.cloud.logging._helpers import LogSeverity - - client = _Client(self.PROJECT) - - stackdriver_logger_name = "python" - python_logger_name = "mylogger" - transport = self._make_one(client, stackdriver_logger_name) - message = "hello world" - record = logging.LogRecord( - python_logger_name, logging.INFO, None, None, message, None, None - ) - - transport.send(record, message, _GLOBAL_RESOURCE) - EXPECTED_STRUCT = {"message": message, "python_logger": python_logger_name} - EXPECTED_SENT = ( - EXPECTED_STRUCT, - LogSeverity.INFO, - _GLOBAL_RESOURCE, - None, - None, - None, - ) - self.assertEqual(transport.logger.log_struct_called_with, EXPECTED_SENT) - - -class _Logger(object): - from google.cloud.logging.logger import _GLOBAL_RESOURCE - - def __init__(self, name): - self.name = name - - def log_struct( - self, - message, - severity=None, - resource=_GLOBAL_RESOURCE, - labels=None, - trace=None, - span_id=None, - ): - self.log_struct_called_with = ( - message, - severity, - resource, - labels, - trace, - span_id, - ) - - -class _Client(object): - def __init__(self, project): - self.project = project - - def logger(self, name): # pylint: disable=unused-argument - self._logger = _Logger(name) - return self._logger - - -class _Handler(object): - def __init__(self, level): - self.level = level # pragma: NO COVER - - def acquire(self): - pass # pragma: NO COVER - - def release(self): - pass # pragma: NO COVER diff --git a/logging/tests/unit/test__gapic.py b/logging/tests/unit/test__gapic.py deleted file mode 100644 index ad6ded2bd1f7..000000000000 --- a/logging/tests/unit/test__gapic.py +++ /dev/null @@ -1,615 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -from google.api_core import grpc_helpers -import google.auth.credentials -from google.protobuf import empty_pb2 -import mock - -import google.cloud.logging -from google.cloud.logging import _gapic -from google.cloud.logging_v2.gapic import config_service_v2_client -from google.cloud.logging_v2.gapic import logging_service_v2_client -from google.cloud.logging_v2.gapic import metrics_service_v2_client -from google.cloud.logging_v2.proto import log_entry_pb2 -from google.cloud.logging_v2.proto import logging_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2 -from google.cloud.logging_v2.proto import logging_metrics_pb2 - - -PROJECT = "PROJECT" -PROJECT_PATH = "projects/%s" % (PROJECT,) -FILTER = "logName:syslog AND severity>=ERROR" - - -class Test_LoggingAPI(object): - LOG_NAME = "log_name" - LOG_PATH = "projects/%s/logs/%s" % (PROJECT, LOG_NAME) - - @staticmethod - def make_logging_api(): - channel = grpc_helpers.ChannelStub() - gapic_client = logging_service_v2_client.LoggingServiceV2Client(channel=channel) - handwritten_client = mock.Mock() - api = _gapic._LoggingAPI(gapic_client, handwritten_client) - return channel, api - - def test_ctor(self): - channel = grpc_helpers.ChannelStub() - gapic_client = logging_service_v2_client.LoggingServiceV2Client(channel=channel) - api = _gapic._LoggingAPI(gapic_client, mock.sentinel.client) - assert api._gapic_api is gapic_client - assert api._client is mock.sentinel.client - - def test_list_entries(self): - channel, api = self.make_logging_api() - - log_entry_msg = log_entry_pb2.LogEntry( - log_name=self.LOG_PATH, text_payload="text" - ) - channel.ListLogEntries.response = logging_pb2.ListLogEntriesResponse( - entries=[log_entry_msg] - ) - result = api.list_entries([PROJECT], FILTER, google.cloud.logging.DESCENDING) - - entries = list(result) - - # Check the response - assert len(entries) == 1 - entry = entries[0] - assert isinstance(entry, google.cloud.logging.entries.TextEntry) - assert entry.payload == "text" - - # Check the request - assert len(channel.ListLogEntries.requests) == 1 - request = channel.ListLogEntries.requests[0] - assert request.project_ids == [PROJECT] - assert request.filter == FILTER - assert request.order_by == google.cloud.logging.DESCENDING - - def test_list_entries_with_options(self): - channel, api = self.make_logging_api() - - channel.ListLogEntries.response = logging_pb2.ListLogEntriesResponse(entries=[]) - - result = api.list_entries( - [PROJECT], - FILTER, - google.cloud.logging.ASCENDING, - page_size=42, - page_token="token", - ) - - list(result) - - # Check the request - assert len(channel.ListLogEntries.requests) == 1 - request = channel.ListLogEntries.requests[0] - assert request.project_ids == [PROJECT] - assert request.filter == FILTER - assert request.order_by == google.cloud.logging.ASCENDING - assert request.page_size == 42 - assert request.page_token == "token" - - def test_write_entries_single(self): - channel, api = self.make_logging_api() - - channel.WriteLogEntries.response = empty_pb2.Empty() - - entry = { - "logName": self.LOG_PATH, - "resource": {"type": "global"}, - "textPayload": "text", - } - - api.write_entries([entry]) - - # Check the request - assert len(channel.WriteLogEntries.requests) == 1 - request = channel.WriteLogEntries.requests[0] - assert request.partial_success is False - assert len(request.entries) == 1 - assert request.entries[0].log_name == entry["logName"] - assert request.entries[0].resource.type == entry["resource"]["type"] - assert 
request.entries[0].text_payload == "text" - - def test_logger_delete(self): - channel, api = self.make_logging_api() - - channel.DeleteLog.response = empty_pb2.Empty() - - api.logger_delete(PROJECT, self.LOG_NAME) - - assert len(channel.DeleteLog.requests) == 1 - request = channel.DeleteLog.requests[0] - assert request.log_name == self.LOG_PATH - - -class Test_SinksAPI(object): - SINK_NAME = "sink_name" - SINK_PATH = "projects/%s/sinks/%s" % (PROJECT, SINK_NAME) - DESTINATION_URI = "faux.googleapis.com/destination" - SINK_WRITER_IDENTITY = "serviceAccount:project-123@example.com" - - @staticmethod - def make_sinks_api(): - channel = grpc_helpers.ChannelStub() - gapic_client = config_service_v2_client.ConfigServiceV2Client(channel=channel) - handwritten_client = mock.Mock() - api = _gapic._SinksAPI(gapic_client, handwritten_client) - return channel, api - - def test_ctor(self): - channel = grpc_helpers.ChannelStub() - gapic_client = config_service_v2_client.ConfigServiceV2Client(channel=channel) - api = _gapic._SinksAPI(gapic_client, mock.sentinel.client) - assert api._gapic_api is gapic_client - assert api._client is mock.sentinel.client - - def test_list_sinks(self): - channel, api = self.make_sinks_api() - - sink_msg = logging_config_pb2.LogSink( - name=self.SINK_PATH, destination=self.DESTINATION_URI, filter=FILTER - ) - channel.ListSinks.response = logging_config_pb2.ListSinksResponse( - sinks=[sink_msg] - ) - - result = api.list_sinks(PROJECT) - sinks = list(result) - - # Check the response - assert len(sinks) == 1 - sink = sinks[0] - assert isinstance(sink, google.cloud.logging.sink.Sink) - assert sink.name == self.SINK_PATH - assert sink.destination == self.DESTINATION_URI - assert sink.filter_ == FILTER - - # Check the request - assert len(channel.ListSinks.requests) == 1 - request = channel.ListSinks.requests[0] - assert request.parent == PROJECT_PATH - - def test_list_sinks_with_options(self): - channel, api = self.make_sinks_api() - - channel.ListSinks.response = logging_config_pb2.ListSinksResponse(sinks=[]) - - result = api.list_sinks(PROJECT, page_size=42, page_token="token") - list(result) - - # Check the request - assert len(channel.ListSinks.requests) == 1 - request = channel.ListSinks.requests[0] - assert request.parent == "projects/%s" % PROJECT - assert request.page_size == 42 - assert request.page_token == "token" - - def test_sink_create(self): - channel, api = self.make_sinks_api() - - channel.CreateSink.response = logging_config_pb2.LogSink( - name=self.SINK_NAME, - destination=self.DESTINATION_URI, - filter=FILTER, - writer_identity=self.SINK_WRITER_IDENTITY, - ) - - result = api.sink_create( - PROJECT, - self.SINK_NAME, - FILTER, - self.DESTINATION_URI, - unique_writer_identity=True, - ) - - # Check response - assert result == { - "name": self.SINK_NAME, - "filter": FILTER, - "destination": self.DESTINATION_URI, - "writerIdentity": self.SINK_WRITER_IDENTITY, - } - - # Check request - assert len(channel.CreateSink.requests) == 1 - request = channel.CreateSink.requests[0] - assert request.parent == PROJECT_PATH - assert request.unique_writer_identity is True - assert request.sink.name == self.SINK_NAME - assert request.sink.filter == FILTER - assert request.sink.destination == self.DESTINATION_URI - - def test_sink_get(self): - channel, api = self.make_sinks_api() - - channel.GetSink.response = logging_config_pb2.LogSink( - name=self.SINK_PATH, destination=self.DESTINATION_URI, filter=FILTER - ) - - response = api.sink_get(PROJECT, self.SINK_NAME) - - # Check 
response - assert response == { - "name": self.SINK_PATH, - "filter": FILTER, - "destination": self.DESTINATION_URI, - } - - # Check request - assert len(channel.GetSink.requests) == 1 - request = channel.GetSink.requests[0] - assert request.sink_name == self.SINK_PATH - - def test_sink_update(self): - channel, api = self.make_sinks_api() - - channel.UpdateSink.response = logging_config_pb2.LogSink( - name=self.SINK_NAME, - destination=self.DESTINATION_URI, - filter=FILTER, - writer_identity=self.SINK_WRITER_IDENTITY, - ) - - result = api.sink_update( - PROJECT, - self.SINK_NAME, - FILTER, - self.DESTINATION_URI, - unique_writer_identity=True, - ) - - # Check response - assert result == { - "name": self.SINK_NAME, - "filter": FILTER, - "destination": self.DESTINATION_URI, - "writerIdentity": self.SINK_WRITER_IDENTITY, - } - - # Check request - assert len(channel.UpdateSink.requests) == 1 - request = channel.UpdateSink.requests[0] - assert request.sink_name == self.SINK_PATH - assert request.unique_writer_identity is True - assert request.sink.name == self.SINK_PATH - assert request.sink.filter == FILTER - assert request.sink.destination == self.DESTINATION_URI - - def test_sink_delete(self): - channel, api = self.make_sinks_api() - - channel.DeleteSink.response = empty_pb2.Empty() - - api.sink_delete(PROJECT, self.SINK_NAME) - - assert len(channel.DeleteSink.requests) == 1 - request = channel.DeleteSink.requests[0] - assert request.sink_name == self.SINK_PATH - - -class Test_MetricsAPI(object): - METRIC_NAME = "metric_name" - METRIC_PATH = "projects/%s/metrics/%s" % (PROJECT, METRIC_NAME) - DESCRIPTION = "Description" - - @staticmethod - def make_metrics_api(): - channel = grpc_helpers.ChannelStub() - gapic_client = metrics_service_v2_client.MetricsServiceV2Client(channel=channel) - handwritten_client = mock.Mock() - api = _gapic._MetricsAPI(gapic_client, handwritten_client) - return channel, api - - def test_ctor(self): - channel = grpc_helpers.ChannelStub() - gapic_client = metrics_service_v2_client.MetricsServiceV2Client(channel=channel) - api = _gapic._MetricsAPI(gapic_client, mock.sentinel.client) - assert api._gapic_api is gapic_client - assert api._client is mock.sentinel.client - - def test_list_metrics(self): - channel, api = self.make_metrics_api() - - sink_msg = logging_metrics_pb2.LogMetric( - name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER - ) - channel.ListLogMetrics.response = logging_metrics_pb2.ListLogMetricsResponse( - metrics=[sink_msg] - ) - - result = api.list_metrics(PROJECT) - metrics = list(result) - - # Check the response - assert len(metrics) == 1 - metric = metrics[0] - assert isinstance(metric, google.cloud.logging.metric.Metric) - assert metric.name == self.METRIC_PATH - assert metric.description == self.DESCRIPTION - assert metric.filter_ == FILTER - - # Check the request - assert len(channel.ListLogMetrics.requests) == 1 - request = channel.ListLogMetrics.requests[0] - assert request.parent == PROJECT_PATH - - def test_list_metrics_options(self): - channel, api = self.make_metrics_api() - - channel.ListLogMetrics.response = logging_metrics_pb2.ListLogMetricsResponse( - metrics=[] - ) - - result = api.list_metrics(PROJECT, page_size=42, page_token="token") - list(result) - - # Check the request - assert len(channel.ListLogMetrics.requests) == 1 - request = channel.ListLogMetrics.requests[0] - assert request.parent == PROJECT_PATH - assert request.page_size == 42 - assert request.page_token == "token" - - def test_metric_create(self): - 
channel, api = self.make_metrics_api() - - channel.CreateLogMetric.response = empty_pb2.Empty() - - api.metric_create(PROJECT, self.METRIC_NAME, FILTER, self.DESCRIPTION) - - # Check the request - assert len(channel.CreateLogMetric.requests) == 1 - request = channel.CreateLogMetric.requests[0] - assert request.parent == PROJECT_PATH - assert request.metric.name == self.METRIC_NAME - assert request.metric.filter == FILTER - assert request.metric.description == self.DESCRIPTION - - def test_metric_get(self): - channel, api = self.make_metrics_api() - - channel.GetLogMetric.response = logging_metrics_pb2.LogMetric( - name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER - ) - - response = api.metric_get(PROJECT, self.METRIC_NAME) - - # Check the response - assert response == { - "name": self.METRIC_PATH, - "filter": FILTER, - "description": self.DESCRIPTION, - } - - # Check the request - assert len(channel.GetLogMetric.requests) == 1 - request = channel.GetLogMetric.requests[0] - assert request.metric_name == self.METRIC_PATH - - def test_metric_update(self): - channel, api = self.make_metrics_api() - - channel.UpdateLogMetric.response = logging_metrics_pb2.LogMetric( - name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER - ) - - response = api.metric_update( - PROJECT, self.METRIC_NAME, FILTER, self.DESCRIPTION - ) - - # Check the response - assert response == { - "name": self.METRIC_PATH, - "filter": FILTER, - "description": self.DESCRIPTION, - } - - # Check the request - assert len(channel.UpdateLogMetric.requests) == 1 - request = channel.UpdateLogMetric.requests[0] - assert request.metric_name == self.METRIC_PATH - assert request.metric.name == self.METRIC_PATH - assert request.metric.filter == FILTER - assert request.metric.description == self.DESCRIPTION - - def test_metric_delete(self): - channel, api = self.make_metrics_api() - - channel.DeleteLogMetric.response = empty_pb2.Empty() - - api.metric_delete(PROJECT, self.METRIC_NAME) - - assert len(channel.DeleteLogMetric.requests) == 1 - request = channel.DeleteLogMetric.requests[0] - assert request.metric_name == self.METRIC_PATH - - -class Test__parse_log_entry(unittest.TestCase): - @staticmethod - def _call_fut(*args, **kwargs): - from google.cloud.logging._gapic import _parse_log_entry - - return _parse_log_entry(*args, **kwargs) - - def test_simple(self): - from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry - - entry_pb = LogEntry(log_name=u"lol-jk", text_payload=u"bah humbug") - result = self._call_fut(entry_pb) - expected = {"logName": entry_pb.log_name, "textPayload": entry_pb.text_payload} - self.assertEqual(result, expected) - - @mock.patch("google.cloud.logging._gapic.MessageToDict", side_effect=TypeError) - def test_non_registry_failure(self, msg_to_dict_mock): - entry_pb = mock.Mock(spec=["HasField"]) - entry_pb.HasField.return_value = False - with self.assertRaises(TypeError): - self._call_fut(entry_pb) - - entry_pb.HasField.assert_called_once_with("proto_payload") - msg_to_dict_mock.assert_called_once_with(entry_pb) - - def test_unregistered_type(self): - from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry - from google.protobuf import any_pb2 - from google.protobuf import descriptor_pool - from google.protobuf.timestamp_pb2 import Timestamp - - pool = descriptor_pool.Default() - type_name = "google.bigtable.admin.v2.UpdateClusterMetadata" - # Make sure the descriptor is not known in the registry. 
- with self.assertRaises(KeyError): - pool.FindMessageTypeByName(type_name) - - type_url = "type.googleapis.com/" + type_name - metadata_bytes = b"\n\n\n\x03foo\x12\x03bar\x12\x06\x08\xbd\xb6\xfb\xc6\x05" - any_pb = any_pb2.Any(type_url=type_url, value=metadata_bytes) - timestamp = Timestamp(seconds=61, nanos=1234000) - - entry_pb = LogEntry(proto_payload=any_pb, timestamp=timestamp) - result = self._call_fut(entry_pb) - self.assertEqual(len(result), 2) - self.assertEqual(result["timestamp"], "1970-01-01T00:01:01.001234Z") - # NOTE: This "hack" is needed on Windows, where the equality check - # for an ``Any`` instance fails on unregistered types. - self.assertEqual(result["protoPayload"].type_url, type_url) - self.assertEqual(result["protoPayload"].value, metadata_bytes) - - def test_registered_type(self): - from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry - from google.protobuf import any_pb2 - from google.protobuf import descriptor_pool - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - pool = descriptor_pool.Default() - type_name = "google.protobuf.Struct" - # Make sure the descriptor is known in the registry. - descriptor = pool.FindMessageTypeByName(type_name) - self.assertEqual(descriptor.name, "Struct") - - type_url = "type.googleapis.com/" + type_name - field_name = "foo" - field_value = u"Bar" - struct_pb = Struct(fields={field_name: Value(string_value=field_value)}) - any_pb = any_pb2.Any(type_url=type_url, value=struct_pb.SerializeToString()) - - entry_pb = LogEntry(proto_payload=any_pb, log_name=u"all-good") - result = self._call_fut(entry_pb) - expected_proto = { - "logName": entry_pb.log_name, - "protoPayload": {"@type": type_url, "value": {field_name: field_value}}, - } - self.assertEqual(result, expected_proto) - - -class Test__log_entry_mapping_to_pb(unittest.TestCase): - @staticmethod - def _call_fut(*args, **kwargs): - from google.cloud.logging._gapic import _log_entry_mapping_to_pb - - return _log_entry_mapping_to_pb(*args, **kwargs) - - def test_simple(self): - from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry - - result = self._call_fut({}) - self.assertEqual(result, LogEntry()) - - def test_unregistered_type(self): - from google.protobuf import descriptor_pool - from google.protobuf.json_format import ParseError - - pool = descriptor_pool.Default() - type_name = "google.bigtable.admin.v2.UpdateClusterMetadata" - # Make sure the descriptor is not known in the registry. - with self.assertRaises(KeyError): - pool.FindMessageTypeByName(type_name) - - type_url = "type.googleapis.com/" + type_name - json_mapping = { - "protoPayload": { - "@type": type_url, - "originalRequest": {"name": "foo", "location": "bar"}, - "requestTime": {"seconds": 1491000125}, - } - } - with self.assertRaises(ParseError): - self._call_fut(json_mapping) - - def test_registered_type(self): - from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry - from google.protobuf import any_pb2 - from google.protobuf import descriptor_pool - - pool = descriptor_pool.Default() - type_name = "google.protobuf.Struct" - # Make sure the descriptor is known in the registry. - descriptor = pool.FindMessageTypeByName(type_name) - self.assertEqual(descriptor.name, "Struct") - - type_url = "type.googleapis.com/" + type_name - field_name = "foo" - field_value = u"Bar" - json_mapping = { - "logName": u"hi-everybody", - "protoPayload": {"@type": type_url, "value": {field_name: field_value}}, - } - # Convert to a valid LogEntry. 
- result = self._call_fut(json_mapping) - entry_pb = LogEntry( - log_name=json_mapping["logName"], - proto_payload=any_pb2.Any( - type_url=type_url, value=b"\n\014\n\003foo\022\005\032\003Bar" - ), - ) - self.assertEqual(result, entry_pb) - - -@mock.patch("google.cloud.logging._gapic.LoggingServiceV2Client", autospec=True) -def test_make_logging_api(gapic_client): - client = mock.Mock(spec=["_credentials", "_client_info"]) - api = _gapic.make_logging_api(client) - assert api._client == client - assert api._gapic_api == gapic_client.return_value - gapic_client.assert_called_once_with( - credentials=client._credentials, client_info=client._client_info - ) - - -@mock.patch("google.cloud.logging._gapic.MetricsServiceV2Client", autospec=True) -def test_make_metrics_api(gapic_client): - client = mock.Mock(spec=["_credentials", "_client_info"]) - api = _gapic.make_metrics_api(client) - assert api._client == client - assert api._gapic_api == gapic_client.return_value - gapic_client.assert_called_once_with( - credentials=client._credentials, client_info=client._client_info - ) - - -@mock.patch("google.cloud.logging._gapic.ConfigServiceV2Client", autospec=True) -def test_make_sinks_api(gapic_client): - client = mock.Mock(spec=["_credentials", "_client_info"]) - api = _gapic.make_sinks_api(client) - assert api._client == client - assert api._gapic_api == gapic_client.return_value - gapic_client.assert_called_once_with( - credentials=client._credentials, client_info=client._client_info - ) diff --git a/logging/tests/unit/test__helpers.py b/logging/tests/unit/test__helpers.py deleted file mode 100644 index db0804e66638..000000000000 --- a/logging/tests/unit/test__helpers.py +++ /dev/null @@ -1,179 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import logging -import unittest - -import mock - - -class Test_entry_from_resource(unittest.TestCase): - @staticmethod - def _call_fut(resource, client, loggers): - from google.cloud.logging._helpers import entry_from_resource - - return entry_from_resource(resource, client, loggers) - - def _payload_helper(self, key, class_name): - import mock - - resource = {} - if key is not None: - resource[key] = "yup" - client = object() - loggers = {} - mock_class = EntryMock() - - name = "google.cloud.logging._helpers." 
+ class_name - with mock.patch(name, new=mock_class): - result = self._call_fut(resource, client, loggers) - - self.assertIs(result, mock_class.sentinel) - self.assertEqual(mock_class.called, (resource, client, loggers)) - - def test_wo_payload(self): - self._payload_helper(None, "LogEntry") - - def test_text_payload(self): - self._payload_helper("textPayload", "TextEntry") - - def test_json_payload(self): - self._payload_helper("jsonPayload", "StructEntry") - - def test_proto_payload(self): - self._payload_helper("protoPayload", "ProtobufEntry") - - -class Test_retrieve_metadata_server(unittest.TestCase): - @staticmethod - def _call_fut(metadata_key): - from google.cloud.logging._helpers import retrieve_metadata_server - - return retrieve_metadata_server(metadata_key) - - def test_metadata_exists(self): - status_code_ok = 200 - response_text = "my-gke-cluster" - metadata_key = "test_key" - - response_mock = ResponseMock(status_code=status_code_ok) - response_mock.text = response_text - - requests_mock = mock.Mock() - requests_mock.get.return_value = response_mock - requests_mock.codes.ok = status_code_ok - - patch = mock.patch("google.cloud.logging._helpers.requests", requests_mock) - - with patch: - metadata = self._call_fut(metadata_key) - - self.assertEqual(metadata, response_text) - - def test_metadata_does_not_exist(self): - status_code_ok = 200 - status_code_not_found = 404 - metadata_key = "test_key" - - response_mock = ResponseMock(status_code=status_code_not_found) - - requests_mock = mock.Mock() - requests_mock.get.return_value = response_mock - requests_mock.codes.ok = status_code_ok - - patch = mock.patch("google.cloud.logging._helpers.requests", requests_mock) - - with patch: - metadata = self._call_fut(metadata_key) - - self.assertIsNone(metadata) - - def test_request_exception(self): - import requests - - metadata_key = "test_url_cannot_connect" - metadata_url = "http://metadata.invalid/" - - requests_get_mock = mock.Mock(spec=["__call__"]) - requests_get_mock.side_effect = requests.exceptions.RequestException - - requests_get_patch = mock.patch("requests.get", requests_get_mock) - - url_patch = mock.patch( - "google.cloud.logging._helpers.METADATA_URL", new=metadata_url - ) - - with requests_get_patch: - with url_patch: - metadata = self._call_fut(metadata_key) - - self.assertIsNone(metadata) - - -class Test__normalize_severity(unittest.TestCase): - @staticmethod - def _stackdriver_severity(): - from google.cloud.logging._helpers import LogSeverity - - return LogSeverity - - def _normalize_severity_helper(self, stdlib_level, enum_level): - from google.cloud.logging._helpers import _normalize_severity - - self.assertEqual(_normalize_severity(stdlib_level), enum_level) - - def test__normalize_severity_critical(self): - severity = self._stackdriver_severity() - self._normalize_severity_helper(logging.CRITICAL, severity.CRITICAL) - - def test__normalize_severity_error(self): - severity = self._stackdriver_severity() - self._normalize_severity_helper(logging.ERROR, severity.ERROR) - - def test__normalize_severity_warning(self): - severity = self._stackdriver_severity() - self._normalize_severity_helper(logging.WARNING, severity.WARNING) - - def test__normalize_severity_info(self): - severity = self._stackdriver_severity() - self._normalize_severity_helper(logging.INFO, severity.INFO) - - def test__normalize_severity_debug(self): - severity = self._stackdriver_severity() - self._normalize_severity_helper(logging.DEBUG, severity.DEBUG) - - def 
test__normalize_severity_notset(self): - severity = self._stackdriver_severity() - self._normalize_severity_helper(logging.NOTSET, severity.DEFAULT) - - def test__normalize_severity_non_standard(self): - unknown_level = 35 - self._normalize_severity_helper(unknown_level, unknown_level) - - -class EntryMock(object): - def __init__(self): - self.sentinel = object() - self.called = None - - def from_api_repr(self, resource, client, loggers): - self.called = (resource, client, loggers) - return self.sentinel - - -class ResponseMock(object): - def __init__(self, status_code, text="test_response_text"): - self.status_code = status_code - self.text = text diff --git a/logging/tests/unit/test__http.py b/logging/tests/unit/test__http.py deleted file mode 100644 index 4ffc1cfebd1c..000000000000 --- a/logging/tests/unit/test__http.py +++ /dev/null @@ -1,852 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class TestConnection(unittest.TestCase): - - PROJECT = "project" - FILTER = "logName:syslog AND severity>=ERROR" - - @staticmethod - def _get_target_class(): - from google.cloud.logging._http import Connection - - return Connection - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_default_url(self): - client = object() - conn = self._make_one(client) - self.assertIs(conn._client, client) - - def test_build_api_url_w_custom_endpoint(self): - custom_endpoint = "https://foo-logging.googleapis.com" - conn = self._make_one(object(), api_endpoint=custom_endpoint) - URI = "/".join([custom_endpoint, conn.API_VERSION, "foo"]) - self.assertEqual(conn.build_api_url("/foo"), URI) - - def test_extra_headers(self): - import requests - from google.cloud import _http as base_http - - http = mock.create_autospec(requests.Session, instance=True) - response = requests.Response() - response.status_code = 200 - data = b"brent-spiner" - response._content = data - http.request.return_value = response - client = mock.Mock(_http=http, spec=["_http"]) - - conn = self._make_one(client) - req_data = "req-data-boring" - result = conn.api_request("GET", "/rainbow", data=req_data, expect_json=False) - self.assertEqual(result, data) - - expected_headers = { - "Accept-Encoding": "gzip", - base_http.CLIENT_INFO_HEADER: conn.user_agent, - "User-Agent": conn.user_agent, - } - expected_uri = conn.build_api_url("/rainbow") - http.request.assert_called_once_with( - data=req_data, - headers=expected_headers, - method="GET", - url=expected_uri, - timeout=None, - ) - - -class Test_LoggingAPI(unittest.TestCase): - - PROJECT = "project" - LIST_ENTRIES_PATH = "entries:list" - WRITE_ENTRIES_PATH = "entries:write" - LOGGER_NAME = "LOGGER_NAME" - FILTER = "logName:syslog AND severity>=ERROR" - - @staticmethod - def _get_target_class(): - from google.cloud.logging._http import _LoggingAPI - - return 
_LoggingAPI - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - connection = _Connection() - client = _Client(connection) - api = self._make_one(client) - self.assertIs(api._client, client) - self.assertEqual(api.api_request, connection.api_request) - - @staticmethod - def _make_timestamp(): - import datetime - from google.cloud._helpers import UTC - - NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) - return NOW, _datetime_to_rfc3339_w_nanos(NOW) - - def test_list_entries_no_paging(self): - import six - from google.cloud.logging.client import Client - from google.cloud.logging.entries import TextEntry - from google.cloud.logging.logger import Logger - - NOW, TIMESTAMP = self._make_timestamp() - IID = "IID" - TEXT = "TEXT" - SENT = {"projectIds": [self.PROJECT]} - TOKEN = "TOKEN" - RETURNED = { - "entries": [ - { - "textPayload": TEXT, - "insertId": IID, - "resource": {"type": "global"}, - "timestamp": TIMESTAMP, - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - } - ], - "nextPageToken": TOKEN, - } - client = Client( - project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False - ) - client._connection = _Connection(RETURNED) - api = self._make_one(client) - - iterator = api.list_entries([self.PROJECT]) - page = six.next(iterator.pages) - entries = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the entries returned. - self.assertEqual(len(entries), 1) - entry = entries[0] - self.assertIsInstance(entry, TextEntry) - self.assertEqual(entry.payload, TEXT) - self.assertIsInstance(entry.logger, Logger) - self.assertEqual(entry.logger.name, self.LOGGER_NAME) - self.assertEqual(entry.insert_id, IID) - self.assertEqual(entry.timestamp, NOW) - self.assertIsNone(entry.labels) - self.assertIsNone(entry.severity) - self.assertIsNone(entry.http_request) - - called_with = client._connection._called_with - expected_path = "/%s" % (self.LIST_ENTRIES_PATH,) - self.assertEqual( - called_with, {"method": "POST", "path": expected_path, "data": SENT} - ) - - def test_list_entries_w_paging(self): - from google.cloud.logging import DESCENDING - from google.cloud.logging.client import Client - from google.cloud.logging.logger import Logger - from google.cloud.logging.entries import ProtobufEntry - from google.cloud.logging.entries import StructEntry - - PROJECT1 = "PROJECT1" - PROJECT2 = "PROJECT2" - NOW, TIMESTAMP = self._make_timestamp() - IID1 = "IID1" - IID2 = "IID2" - PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} - PROTO_PAYLOAD = PAYLOAD.copy() - PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" - TOKEN = "TOKEN" - PAGE_SIZE = 42 - SENT = { - "projectIds": [PROJECT1, PROJECT2], - "filter": self.FILTER, - "orderBy": DESCENDING, - "pageSize": PAGE_SIZE, - "pageToken": TOKEN, - } - RETURNED = { - "entries": [ - { - "jsonPayload": PAYLOAD, - "insertId": IID1, - "resource": {"type": "global"}, - "timestamp": TIMESTAMP, - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - }, - { - "protoPayload": PROTO_PAYLOAD, - "insertId": IID2, - "resource": {"type": "global"}, - "timestamp": TIMESTAMP, - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - }, - ] - } - client = Client( - project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False - ) - client._connection = _Connection(RETURNED) - api = self._make_one(client) - - iterator = api.list_entries( - projects=[PROJECT1, 
PROJECT2], - filter_=self.FILTER, - order_by=DESCENDING, - page_size=PAGE_SIZE, - page_token=TOKEN, - ) - entries = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the entries returned. - self.assertEqual(len(entries), 2) - entry1 = entries[0] - self.assertIsInstance(entry1, StructEntry) - self.assertEqual(entry1.payload, PAYLOAD) - self.assertIsInstance(entry1.logger, Logger) - self.assertEqual(entry1.logger.name, self.LOGGER_NAME) - self.assertEqual(entry1.insert_id, IID1) - self.assertEqual(entry1.timestamp, NOW) - self.assertIsNone(entry1.labels) - self.assertIsNone(entry1.severity) - self.assertIsNone(entry1.http_request) - - entry2 = entries[1] - self.assertIsInstance(entry2, ProtobufEntry) - self.assertEqual(entry2.payload, PROTO_PAYLOAD) - self.assertIsInstance(entry2.logger, Logger) - self.assertEqual(entry2.logger.name, self.LOGGER_NAME) - self.assertEqual(entry2.insert_id, IID2) - self.assertEqual(entry2.timestamp, NOW) - self.assertIsNone(entry2.labels) - self.assertIsNone(entry2.severity) - self.assertIsNone(entry2.http_request) - - called_with = client._connection._called_with - expected_path = "/%s" % (self.LIST_ENTRIES_PATH,) - self.assertEqual( - called_with, {"method": "POST", "path": expected_path, "data": SENT} - ) - - def test_write_entries_single(self): - TEXT = "TEXT" - ENTRY = { - "textPayload": TEXT, - "resource": {"type": "global"}, - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - } - SENT = {"entries": [ENTRY]} - conn = _Connection({}) - client = _Client(conn) - api = self._make_one(client) - - api.write_entries([ENTRY]) - - self.assertEqual(conn._called_with["method"], "POST") - path = "/%s" % self.WRITE_ENTRIES_PATH - self.assertEqual(conn._called_with["path"], path) - self.assertEqual(conn._called_with["data"], SENT) - - def test_write_entries_multiple(self): - TEXT = "TEXT" - LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) - RESOURCE = {"type": "global"} - LABELS = {"baz": "qux", "spam": "eggs"} - ENTRY1 = {"textPayload": TEXT} - ENTRY2 = {"jsonPayload": {"foo": "bar"}} - SENT = { - "logName": LOG_NAME, - "resource": RESOURCE, - "labels": LABELS, - "entries": [ENTRY1, ENTRY2], - } - conn = _Connection({}) - client = _Client(conn) - api = self._make_one(client) - - api.write_entries([ENTRY1, ENTRY2], LOG_NAME, RESOURCE, LABELS) - - self.assertEqual(conn._called_with["method"], "POST") - path = "/%s" % self.WRITE_ENTRIES_PATH - self.assertEqual(conn._called_with["path"], path) - self.assertEqual(conn._called_with["data"], SENT) - - def test_logger_delete(self): - path = "/projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) - conn = _Connection({}) - client = _Client(conn) - api = self._make_one(client) - - api.logger_delete(self.PROJECT, self.LOGGER_NAME) - - self.assertEqual(conn._called_with["method"], "DELETE") - self.assertEqual(conn._called_with["path"], path) - - -class Test_SinksAPI(unittest.TestCase): - - PROJECT = "project" - FILTER = "logName:syslog AND severity>=ERROR" - LIST_SINKS_PATH = "projects/%s/sinks" % (PROJECT,) - SINK_NAME = "sink_name" - SINK_PATH = "projects/%s/sinks/%s" % (PROJECT, SINK_NAME) - DESTINATION_URI = "faux.googleapis.com/destination" - WRITER_IDENTITY = "serviceAccount:project-123@example.com" - - @staticmethod - def _get_target_class(): - from google.cloud.logging._http import _SinksAPI - - return _SinksAPI - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def 
test_ctor(self): - connection = _Connection() - client = _Client(connection) - api = self._make_one(client) - self.assertIs(api._client, client) - self.assertEqual(api.api_request, connection.api_request) - - def test_list_sinks_no_paging(self): - import six - from google.cloud.logging.sink import Sink - - TOKEN = "TOKEN" - RETURNED = { - "sinks": [ - { - "name": self.SINK_PATH, - "filter": self.FILTER, - "destination": self.DESTINATION_URI, - } - ], - "nextPageToken": TOKEN, - } - conn = _Connection(RETURNED) - client = _Client(conn) - api = self._make_one(client) - - iterator = api.list_sinks(self.PROJECT) - page = six.next(iterator.pages) - sinks = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the sinks returned. - self.assertEqual(len(sinks), 1) - sink = sinks[0] - self.assertIsInstance(sink, Sink) - self.assertEqual(sink.name, self.SINK_PATH) - self.assertEqual(sink.filter_, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIs(sink.client, client) - - called_with = conn._called_with - path = "/%s" % (self.LIST_SINKS_PATH,) - self.assertEqual( - called_with, {"method": "GET", "path": path, "query_params": {}} - ) - - def test_list_sinks_w_paging(self): - from google.cloud.logging.sink import Sink - - TOKEN = "TOKEN" - PAGE_SIZE = 42 - RETURNED = { - "sinks": [ - { - "name": self.SINK_PATH, - "filter": self.FILTER, - "destination": self.DESTINATION_URI, - } - ] - } - conn = _Connection(RETURNED) - client = _Client(conn) - api = self._make_one(client) - - iterator = api.list_sinks(self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) - sinks = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the sinks returned. 
- self.assertEqual(len(sinks), 1) - sink = sinks[0] - self.assertIsInstance(sink, Sink) - self.assertEqual(sink.name, self.SINK_PATH) - self.assertEqual(sink.filter_, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIs(sink.client, client) - - called_with = conn._called_with - path = "/%s" % (self.LIST_SINKS_PATH,) - self.assertEqual( - called_with, - { - "method": "GET", - "path": path, - "query_params": {"pageSize": PAGE_SIZE, "pageToken": TOKEN}, - }, - ) - - def test_sink_create_conflict(self): - from google.cloud.exceptions import Conflict - - sent = { - "name": self.SINK_NAME, - "filter": self.FILTER, - "destination": self.DESTINATION_URI, - } - conn = _Connection() - conn._raise_conflict = True - client = _Client(conn) - api = self._make_one(client) - - with self.assertRaises(Conflict): - api.sink_create( - self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI - ) - - path = "/projects/%s/sinks" % (self.PROJECT,) - expected = { - "method": "POST", - "path": path, - "data": sent, - "query_params": {"uniqueWriterIdentity": False}, - } - self.assertEqual(conn._called_with, expected) - - def test_sink_create_ok(self): - sent = { - "name": self.SINK_NAME, - "filter": self.FILTER, - "destination": self.DESTINATION_URI, - } - after_create = sent.copy() - after_create["writerIdentity"] = self.WRITER_IDENTITY - conn = _Connection(after_create) - client = _Client(conn) - api = self._make_one(client) - - returned = api.sink_create( - self.PROJECT, - self.SINK_NAME, - self.FILTER, - self.DESTINATION_URI, - unique_writer_identity=True, - ) - - self.assertEqual(returned, after_create) - path = "/projects/%s/sinks" % (self.PROJECT,) - expected = { - "method": "POST", - "path": path, - "data": sent, - "query_params": {"uniqueWriterIdentity": True}, - } - self.assertEqual(conn._called_with, expected) - - def test_sink_get_miss(self): - from google.cloud.exceptions import NotFound - - conn = _Connection() - client = _Client(conn) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.sink_get(self.PROJECT, self.SINK_NAME) - - self.assertEqual(conn._called_with["method"], "GET") - path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) - self.assertEqual(conn._called_with["path"], path) - - def test_sink_get_hit(self): - RESPONSE = { - "name": self.SINK_PATH, - "filter": self.FILTER, - "destination": self.DESTINATION_URI, - } - conn = _Connection(RESPONSE) - client = _Client(conn) - api = self._make_one(client) - - response = api.sink_get(self.PROJECT, self.SINK_NAME) - - self.assertEqual(response, RESPONSE) - self.assertEqual(conn._called_with["method"], "GET") - path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) - self.assertEqual(conn._called_with["path"], path) - - def test_sink_update_miss(self): - from google.cloud.exceptions import NotFound - - sent = { - "name": self.SINK_NAME, - "filter": self.FILTER, - "destination": self.DESTINATION_URI, - } - conn = _Connection() - client = _Client(conn) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.sink_update( - self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI - ) - - path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) - expected = { - "method": "PUT", - "path": path, - "data": sent, - "query_params": {"uniqueWriterIdentity": False}, - } - self.assertEqual(conn._called_with, expected) - - def test_sink_update_hit(self): - sent = { - "name": self.SINK_NAME, - "filter": self.FILTER, - "destination": 
self.DESTINATION_URI, - } - after_update = sent.copy() - after_update["writerIdentity"] = self.WRITER_IDENTITY - conn = _Connection(after_update) - client = _Client(conn) - api = self._make_one(client) - - returned = api.sink_update( - self.PROJECT, - self.SINK_NAME, - self.FILTER, - self.DESTINATION_URI, - unique_writer_identity=True, - ) - - self.assertEqual(returned, after_update) - path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) - expected = { - "method": "PUT", - "path": path, - "data": sent, - "query_params": {"uniqueWriterIdentity": True}, - } - self.assertEqual(conn._called_with, expected) - - def test_sink_delete_miss(self): - from google.cloud.exceptions import NotFound - - conn = _Connection() - client = _Client(conn) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.sink_delete(self.PROJECT, self.SINK_NAME) - - self.assertEqual(conn._called_with["method"], "DELETE") - path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) - self.assertEqual(conn._called_with["path"], path) - - def test_sink_delete_hit(self): - conn = _Connection({}) - client = _Client(conn) - api = self._make_one(client) - - api.sink_delete(self.PROJECT, self.SINK_NAME) - - self.assertEqual(conn._called_with["method"], "DELETE") - path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) - self.assertEqual(conn._called_with["path"], path) - - -class Test_MetricsAPI(unittest.TestCase): - - PROJECT = "project" - FILTER = "logName:syslog AND severity>=ERROR" - LIST_METRICS_PATH = "projects/%s/metrics" % (PROJECT,) - METRIC_NAME = "metric_name" - METRIC_PATH = "projects/%s/metrics/%s" % (PROJECT, METRIC_NAME) - DESCRIPTION = "DESCRIPTION" - - @staticmethod - def _get_target_class(): - from google.cloud.logging._http import _MetricsAPI - - return _MetricsAPI - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_list_metrics_no_paging(self): - import six - from google.cloud.logging.metric import Metric - - TOKEN = "TOKEN" - RETURNED = { - "metrics": [{"name": self.METRIC_PATH, "filter": self.FILTER}], - "nextPageToken": TOKEN, - } - conn = _Connection(RETURNED) - client = _Client(conn) - api = self._make_one(client) - - iterator = api.list_metrics(self.PROJECT) - page = six.next(iterator.pages) - metrics = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the metrics returned. - self.assertEqual(len(metrics), 1) - metric = metrics[0] - self.assertIsInstance(metric, Metric) - self.assertEqual(metric.name, self.METRIC_PATH) - self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, "") - self.assertIs(metric.client, client) - - called_with = conn._called_with - path = "/%s" % (self.LIST_METRICS_PATH,) - self.assertEqual( - called_with, {"method": "GET", "path": path, "query_params": {}} - ) - - def test_list_metrics_w_paging(self): - from google.cloud.logging.metric import Metric - - TOKEN = "TOKEN" - PAGE_SIZE = 42 - RETURNED = {"metrics": [{"name": self.METRIC_PATH, "filter": self.FILTER}]} - conn = _Connection(RETURNED) - client = _Client(conn) - api = self._make_one(client) - - iterator = api.list_metrics(self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) - metrics = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the metrics returned. 
- self.assertEqual(len(metrics), 1) - metric = metrics[0] - self.assertIsInstance(metric, Metric) - self.assertEqual(metric.name, self.METRIC_PATH) - self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, "") - self.assertIs(metric.client, client) - - called_with = conn._called_with - path = "/%s" % (self.LIST_METRICS_PATH,) - self.assertEqual( - called_with, - { - "method": "GET", - "path": path, - "query_params": {"pageSize": PAGE_SIZE, "pageToken": TOKEN}, - }, - ) - - def test_metric_create_conflict(self): - from google.cloud.exceptions import Conflict - - SENT = { - "name": self.METRIC_NAME, - "filter": self.FILTER, - "description": self.DESCRIPTION, - } - conn = _Connection() - conn._raise_conflict = True - client = _Client(conn) - api = self._make_one(client) - - with self.assertRaises(Conflict): - api.metric_create( - self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION - ) - - self.assertEqual(conn._called_with["method"], "POST") - path = "/projects/%s/metrics" % (self.PROJECT,) - self.assertEqual(conn._called_with["path"], path) - self.assertEqual(conn._called_with["data"], SENT) - - def test_metric_create_ok(self): - SENT = { - "name": self.METRIC_NAME, - "filter": self.FILTER, - "description": self.DESCRIPTION, - } - conn = _Connection({}) - client = _Client(conn) - api = self._make_one(client) - - api.metric_create(self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) - - self.assertEqual(conn._called_with["method"], "POST") - path = "/projects/%s/metrics" % (self.PROJECT,) - self.assertEqual(conn._called_with["path"], path) - self.assertEqual(conn._called_with["data"], SENT) - - def test_metric_get_miss(self): - from google.cloud.exceptions import NotFound - - conn = _Connection() - client = _Client(conn) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.metric_get(self.PROJECT, self.METRIC_NAME) - - self.assertEqual(conn._called_with["method"], "GET") - path = "/projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with["path"], path) - - def test_metric_get_hit(self): - RESPONSE = { - "name": self.METRIC_NAME, - "filter": self.FILTER, - "description": self.DESCRIPTION, - } - conn = _Connection(RESPONSE) - client = _Client(conn) - api = self._make_one(client) - - response = api.metric_get(self.PROJECT, self.METRIC_NAME) - - self.assertEqual(response, RESPONSE) - self.assertEqual(conn._called_with["method"], "GET") - path = "/projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with["path"], path) - - def test_metric_update_miss(self): - from google.cloud.exceptions import NotFound - - SENT = { - "name": self.METRIC_NAME, - "filter": self.FILTER, - "description": self.DESCRIPTION, - } - conn = _Connection() - client = _Client(conn) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.metric_update( - self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION - ) - - self.assertEqual(conn._called_with["method"], "PUT") - path = "/projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with["path"], path) - self.assertEqual(conn._called_with["data"], SENT) - - def test_metric_update_hit(self): - SENT = { - "name": self.METRIC_NAME, - "filter": self.FILTER, - "description": self.DESCRIPTION, - } - conn = _Connection({}) - client = _Client(conn) - api = self._make_one(client) - - api.metric_update(self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) - - 
self.assertEqual(conn._called_with["method"], "PUT") - path = "/projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with["path"], path) - self.assertEqual(conn._called_with["data"], SENT) - - def test_metric_delete_miss(self): - from google.cloud.exceptions import NotFound - - conn = _Connection() - client = _Client(conn) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.metric_delete(self.PROJECT, self.METRIC_NAME) - - self.assertEqual(conn._called_with["method"], "DELETE") - path = "/projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with["path"], path) - - def test_metric_delete_hit(self): - conn = _Connection({}) - client = _Client(conn) - api = self._make_one(client) - - api.metric_delete(self.PROJECT, self.METRIC_NAME) - - self.assertEqual(conn._called_with["method"], "DELETE") - path = "/projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with["path"], path) - - -class _Connection(object): - - _called_with = None - _raise_conflict = False - - def __init__(self, *responses): - self._responses = responses - - def api_request(self, **kw): - from google.cloud.exceptions import Conflict - from google.cloud.exceptions import NotFound - - self._called_with = kw - if self._raise_conflict: - raise Conflict("oops") - try: - response, self._responses = self._responses[0], self._responses[1:] - except IndexError: - raise NotFound("miss") - return response - - -def _datetime_to_rfc3339_w_nanos(value): - from google.cloud._helpers import _RFC3339_NO_FRACTION - - no_fraction = value.strftime(_RFC3339_NO_FRACTION) - return "%s.%09dZ" % (no_fraction, value.microsecond * 1000) - - -class _Client(object): - def __init__(self, connection): - self._connection = connection diff --git a/logging/tests/unit/test_client.py b/logging/tests/unit/test_client.py deleted file mode 100644 index 4e0b5ca22f0d..000000000000 --- a/logging/tests/unit/test_client.py +++ /dev/null @@ -1,738 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class TestClient(unittest.TestCase): - - PROJECT = "PROJECT" - LOGGER_NAME = "LOGGER_NAME" - SINK_NAME = "SINK_NAME" - FILTER = "logName:syslog AND severity>=ERROR" - DESTINATION_URI = "faux.googleapis.com/destination" - METRIC_NAME = "metric_name" - FILTER = "logName:syslog AND severity>=ERROR" - DESCRIPTION = "DESCRIPTION" - - @staticmethod - def _get_target_class(): - from google.cloud.logging.client import Client - - return Client - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - from google.cloud._http import ClientInfo - from google.cloud.logging._http import Connection - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - self.assertEqual(client.project, self.PROJECT) - self.assertIsInstance(client._connection, Connection) - self.assertIsInstance(client._connection._client_info, ClientInfo) - - def test_ctor_explicit(self): - from google.cloud._http import ClientInfo - from google.cloud.logging._http import Connection - - creds = _make_credentials() - client_info = ClientInfo() - client = self._make_one( - project=self.PROJECT, credentials=creds, client_info=client_info - ) - self.assertEqual(client.project, self.PROJECT) - self.assertIs(client._client_info, client_info) - self.assertIsInstance(client._connection, Connection) - self.assertIs(client._connection._client_info, client_info) - - def test_ctor_w_empty_client_options(self): - from google.api_core.client_options import ClientOptions - - creds = _make_credentials() - client_options = ClientOptions() - client = self._make_one( - project=self.PROJECT, credentials=creds, client_options=client_options - ) - self.assertEqual( - client._connection.API_BASE_URL, client._connection.DEFAULT_API_ENDPOINT - ) - - def test_ctor_w_client_options_object(self): - from google.api_core.client_options import ClientOptions - - creds = _make_credentials() - client_options = ClientOptions( - api_endpoint="https://foo-logging.googleapis.com" - ) - client = self._make_one( - project=self.PROJECT, credentials=creds, client_options=client_options - ) - self.assertEqual( - client._connection.API_BASE_URL, "https://foo-logging.googleapis.com" - ) - - def test_ctor_w_client_options_dict(self): - creds = _make_credentials() - client_options = {"api_endpoint": "https://foo-logging.googleapis.com"} - client = self._make_one( - project=self.PROJECT, credentials=creds, client_options=client_options - ) - self.assertEqual( - client._connection.API_BASE_URL, "https://foo-logging.googleapis.com" - ) - - def test_logging_api_wo_gapic(self): - from google.cloud.logging._http import _LoggingAPI - - client = self._make_one( - self.PROJECT, credentials=_make_credentials(), _use_grpc=False - ) - - conn = client._connection = _Connection() - api = client.logging_api - - self.assertIsInstance(api, _LoggingAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.logging_api - self.assertIs(again, api) - - def test_logging_api_w_gapic(self): - clients = [] - api_obj = object() - - def make_api(client_obj): - clients.append(client_obj) - return api_obj - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - - patch = mock.patch("google.cloud.logging.client._gapic") - with patch as 
gapic_module: - gapic_module.make_logging_api.side_effect = make_api - api = client.logging_api - - self.assertIs(api, api_obj) - self.assertEqual(clients, [client]) - # API instance is cached - again = client.logging_api - self.assertIs(again, api) - - def test_no_gapic_ctor(self): - from google.cloud.logging._http import _LoggingAPI - - creds = _make_credentials() - patch = mock.patch("google.cloud.logging.client._USE_GRPC", new=True) - with patch: - client = self._make_one( - project=self.PROJECT, credentials=creds, _use_grpc=False - ) - - api = client.logging_api - self.assertIsInstance(api, _LoggingAPI) - - def test_sinks_api_wo_gapic(self): - from google.cloud.logging._http import _SinksAPI - - client = self._make_one( - self.PROJECT, credentials=_make_credentials(), _use_grpc=False - ) - - conn = client._connection = _Connection() - api = client.sinks_api - - self.assertIsInstance(api, _SinksAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.sinks_api - self.assertIs(again, api) - - def test_sinks_api_w_gapic(self): - clients = [] - api_obj = object() - - def make_api(client_obj): - clients.append(client_obj) - return api_obj - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - - patch = mock.patch("google.cloud.logging.client._gapic") - with patch as gapic_module: - gapic_module.make_sinks_api.side_effect = make_api - api = client.sinks_api - - self.assertIs(api, api_obj) - self.assertEqual(clients, [client]) - # API instance is cached - again = client.sinks_api - self.assertIs(again, api) - - def test_metrics_api_wo_gapic(self): - from google.cloud.logging._http import _MetricsAPI - - client = self._make_one( - self.PROJECT, credentials=_make_credentials(), _use_grpc=False - ) - - conn = client._connection = _Connection() - api = client.metrics_api - - self.assertIsInstance(api, _MetricsAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.metrics_api - self.assertIs(again, api) - - def test_metrics_api_w_gapic(self): - clients = [] - api_obj = object() - - def make_api(client_obj): - clients.append(client_obj) - return api_obj - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - - patch = mock.patch("google.cloud.logging.client._gapic") - with patch as gapic_module: - gapic_module.make_metrics_api.side_effect = make_api - api = client.metrics_api - - self.assertIs(api, api_obj) - self.assertEqual(clients, [client]) - # API instance is cached - again = client.metrics_api - self.assertIs(again, api) - - def test_logger(self): - from google.cloud.logging.logger import Logger - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - logger = client.logger(self.LOGGER_NAME) - self.assertIsInstance(logger, Logger) - self.assertEqual(logger.name, self.LOGGER_NAME) - self.assertIs(logger.client, client) - self.assertEqual(logger.project, self.PROJECT) - - def test_list_entries_defaults(self): - import six - from google.cloud.logging.entries import TextEntry - - IID = "IID" - TEXT = "TEXT" - TOKEN = "TOKEN" - ENTRIES = [ - { - "textPayload": TEXT, - "insertId": IID, - "resource": {"type": "global"}, - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - } - ] - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, _use_grpc=False - ) - returned = {"entries": 
ENTRIES, "nextPageToken": TOKEN} - client._connection = _Connection(returned) - - iterator = client.list_entries() - page = six.next(iterator.pages) - entries = list(page) - token = iterator.next_page_token - - self.assertEqual(len(entries), 1) - entry = entries[0] - self.assertIsInstance(entry, TextEntry) - self.assertEqual(entry.insert_id, IID) - self.assertEqual(entry.payload, TEXT) - logger = entry.logger - self.assertEqual(logger.name, self.LOGGER_NAME) - self.assertIs(logger.client, client) - self.assertEqual(logger.project, self.PROJECT) - self.assertEqual(token, TOKEN) - - called_with = client._connection._called_with - self.assertEqual( - called_with, - { - "path": "/entries:list", - "method": "POST", - "data": {"projectIds": [self.PROJECT]}, - }, - ) - - def test_list_entries_explicit(self): - from google.cloud.logging import DESCENDING - from google.cloud.logging.entries import ProtobufEntry - from google.cloud.logging.entries import StructEntry - from google.cloud.logging.logger import Logger - - PROJECT1 = "PROJECT1" - PROJECT2 = "PROJECT2" - FILTER = "logName:LOGNAME" - IID1 = "IID1" - IID2 = "IID2" - PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} - PROTO_PAYLOAD = PAYLOAD.copy() - PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" - TOKEN = "TOKEN" - PAGE_SIZE = 42 - ENTRIES = [ - { - "jsonPayload": PAYLOAD, - "insertId": IID1, - "resource": {"type": "global"}, - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - }, - { - "protoPayload": PROTO_PAYLOAD, - "insertId": IID2, - "resource": {"type": "global"}, - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - }, - ] - client = self._make_one( - self.PROJECT, credentials=_make_credentials(), _use_grpc=False - ) - returned = {"entries": ENTRIES} - client._connection = _Connection(returned) - - iterator = client.list_entries( - projects=[PROJECT1, PROJECT2], - filter_=FILTER, - order_by=DESCENDING, - page_size=PAGE_SIZE, - page_token=TOKEN, - ) - entries = list(iterator) - token = iterator.next_page_token - - # First, check the token. - self.assertIsNone(token) - # Then check the entries. 
- self.assertEqual(len(entries), 2) - entry = entries[0] - self.assertIsInstance(entry, StructEntry) - self.assertEqual(entry.insert_id, IID1) - self.assertEqual(entry.payload, PAYLOAD) - logger = entry.logger - self.assertIsInstance(logger, Logger) - self.assertEqual(logger.name, self.LOGGER_NAME) - self.assertIs(logger.client, client) - self.assertEqual(logger.project, self.PROJECT) - - entry = entries[1] - self.assertIsInstance(entry, ProtobufEntry) - self.assertEqual(entry.insert_id, IID2) - self.assertEqual(entry.payload, PROTO_PAYLOAD) - logger = entry.logger - self.assertEqual(logger.name, self.LOGGER_NAME) - self.assertIs(logger.client, client) - self.assertEqual(logger.project, self.PROJECT) - - self.assertIs(entries[0].logger, entries[1].logger) - - called_with = client._connection._called_with - self.assertEqual( - called_with, - { - "path": "/entries:list", - "method": "POST", - "data": { - "filter": FILTER, - "orderBy": DESCENDING, - "pageSize": PAGE_SIZE, - "pageToken": TOKEN, - "projectIds": [PROJECT1, PROJECT2], - }, - }, - ) - - def test_sink_defaults(self): - from google.cloud.logging.sink import Sink - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - sink = client.sink(self.SINK_NAME) - self.assertIsInstance(sink, Sink) - self.assertEqual(sink.name, self.SINK_NAME) - self.assertIsNone(sink.filter_) - self.assertIsNone(sink.destination) - self.assertIs(sink.client, client) - self.assertEqual(sink.project, self.PROJECT) - - def test_sink_explicit(self): - from google.cloud.logging.sink import Sink - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - sink = client.sink(self.SINK_NAME, self.FILTER, self.DESTINATION_URI) - self.assertIsInstance(sink, Sink) - self.assertEqual(sink.name, self.SINK_NAME) - self.assertEqual(sink.filter_, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIs(sink.client, client) - self.assertEqual(sink.project, self.PROJECT) - - def test_list_sinks_no_paging(self): - import six - from google.cloud.logging.sink import Sink - - PROJECT = "PROJECT" - TOKEN = "TOKEN" - SINK_NAME = "sink_name" - FILTER = "logName:syslog AND severity>=ERROR" - SINKS = [ - {"name": SINK_NAME, "filter": FILTER, "destination": self.DESTINATION_URI} - ] - client = self._make_one( - project=PROJECT, credentials=_make_credentials(), _use_grpc=False - ) - returned = {"sinks": SINKS, "nextPageToken": TOKEN} - client._connection = _Connection(returned) - - iterator = client.list_sinks() - page = six.next(iterator.pages) - sinks = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the sinks returned. - self.assertEqual(len(sinks), 1) - sink = sinks[0] - self.assertIsInstance(sink, Sink) - self.assertEqual(sink.name, SINK_NAME) - self.assertEqual(sink.filter_, FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIs(sink.client, client) - - # Verify the mocked transport. 
- called_with = client._connection._called_with - path = "/projects/%s/sinks" % (self.PROJECT,) - self.assertEqual( - called_with, {"method": "GET", "path": path, "query_params": {}} - ) - - def test_list_sinks_with_paging(self): - from google.cloud.logging.sink import Sink - - PROJECT = "PROJECT" - SINK_NAME = "sink_name" - FILTER = "logName:syslog AND severity>=ERROR" - TOKEN = "TOKEN" - PAGE_SIZE = 42 - SINKS = [ - {"name": SINK_NAME, "filter": FILTER, "destination": self.DESTINATION_URI} - ] - client = self._make_one( - project=PROJECT, credentials=_make_credentials(), _use_grpc=False - ) - returned = {"sinks": SINKS} - client._connection = _Connection(returned) - - iterator = client.list_sinks(PAGE_SIZE, TOKEN) - sinks = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the sinks returned. - self.assertEqual(len(sinks), 1) - sink = sinks[0] - self.assertIsInstance(sink, Sink) - self.assertEqual(sink.name, SINK_NAME) - self.assertEqual(sink.filter_, FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIs(sink.client, client) - - # Verify the mocked transport. - called_with = client._connection._called_with - path = "/projects/%s/sinks" % (self.PROJECT,) - self.assertEqual( - called_with, - { - "method": "GET", - "path": path, - "query_params": {"pageSize": PAGE_SIZE, "pageToken": TOKEN}, - }, - ) - - def test_metric_defaults(self): - from google.cloud.logging.metric import Metric - - creds = _make_credentials() - - client_obj = self._make_one(project=self.PROJECT, credentials=creds) - metric = client_obj.metric(self.METRIC_NAME) - self.assertIsInstance(metric, Metric) - self.assertEqual(metric.name, self.METRIC_NAME) - self.assertIsNone(metric.filter_) - self.assertEqual(metric.description, "") - self.assertIs(metric.client, client_obj) - self.assertEqual(metric.project, self.PROJECT) - - def test_metric_explicit(self): - from google.cloud.logging.metric import Metric - - creds = _make_credentials() - - client_obj = self._make_one(project=self.PROJECT, credentials=creds) - metric = client_obj.metric( - self.METRIC_NAME, self.FILTER, description=self.DESCRIPTION - ) - self.assertIsInstance(metric, Metric) - self.assertEqual(metric.name, self.METRIC_NAME) - self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, self.DESCRIPTION) - self.assertIs(metric.client, client_obj) - self.assertEqual(metric.project, self.PROJECT) - - def test_list_metrics_no_paging(self): - from google.cloud.logging.metric import Metric - - metrics = [ - { - "name": self.METRIC_NAME, - "filter": self.FILTER, - "description": self.DESCRIPTION, - } - ] - client = self._make_one( - project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False - ) - returned = {"metrics": metrics} - client._connection = _Connection(returned) - - # Execute request. - iterator = client.list_metrics() - metrics = list(iterator) - - # Check the metrics returned. - self.assertEqual(len(metrics), 1) - metric = metrics[0] - self.assertIsInstance(metric, Metric) - self.assertEqual(metric.name, self.METRIC_NAME) - self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, self.DESCRIPTION) - self.assertIs(metric.client, client) - - # Verify mocked transport. 
- called_with = client._connection._called_with - path = "/projects/%s/metrics" % (self.PROJECT,) - self.assertEqual( - called_with, {"method": "GET", "path": path, "query_params": {}} - ) - - def test_list_metrics_with_paging(self): - import six - from google.cloud.logging.metric import Metric - - token = "TOKEN" - next_token = "T00KEN" - page_size = 42 - metrics = [ - { - "name": self.METRIC_NAME, - "filter": self.FILTER, - "description": self.DESCRIPTION, - } - ] - client = self._make_one( - project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False - ) - returned = {"metrics": metrics, "nextPageToken": next_token} - client._connection = _Connection(returned) - - # Execute request. - iterator = client.list_metrics(page_size, token) - page = six.next(iterator.pages) - metrics = list(page) - - # First check the token. - self.assertEqual(iterator.next_page_token, next_token) - # Then check the metrics returned. - self.assertEqual(len(metrics), 1) - metric = metrics[0] - self.assertIsInstance(metric, Metric) - self.assertEqual(metric.name, self.METRIC_NAME) - self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, self.DESCRIPTION) - self.assertIs(metric.client, client) - - # Verify mocked transport. - called_with = client._connection._called_with - path = "/projects/%s/metrics" % (self.PROJECT,) - self.assertEqual( - called_with, - { - "method": "GET", - "path": path, - "query_params": {"pageSize": page_size, "pageToken": token}, - }, - ) - - def test_get_default_handler_app_engine(self): - import os - from google.cloud._testing import _Monkey - from google.cloud.logging.client import _APPENGINE_FLEXIBLE_ENV_VM - from google.cloud.logging.handlers import AppEngineHandler - - credentials = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=credentials, _use_grpc=False - ) - - with _Monkey(os, environ={_APPENGINE_FLEXIBLE_ENV_VM: "True"}): - handler = client.get_default_handler() - - handler.transport.worker.stop() - - self.assertIsInstance(handler, AppEngineHandler) - - def test_get_default_handler_container_engine(self): - from google.cloud.logging.handlers import ContainerEngineHandler - - credentials = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=credentials, _use_grpc=False - ) - - patch = mock.patch( - "google.cloud.logging.client.retrieve_metadata_server", - return_value="test-gke-cluster", - ) - - with patch: - handler = client.get_default_handler() - - self.assertIsInstance(handler, ContainerEngineHandler) - - def test_get_default_handler_general(self): - import io - from google.cloud.logging.handlers import CloudLoggingHandler - from google.cloud.logging.resource import Resource - - name = "test-logger" - resource = Resource("resource_type", {"resource_label": "value"}) - labels = {"handler_label": "value"} - stream = io.BytesIO() - - credentials = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=credentials, _use_grpc=False - ) - - handler = client.get_default_handler( - name=name, resource=resource, labels=labels, stream=stream - ) - - handler.transport.worker.stop() - - self.assertIsInstance(handler, CloudLoggingHandler) - self.assertEqual(handler.name, name) - self.assertEqual(handler.resource, resource) - self.assertEqual(handler.labels, labels) - - def test_setup_logging(self): - from google.cloud.logging.handlers import CloudLoggingHandler - - credentials = _make_credentials() - client = self._make_one( - project=self.PROJECT, 
credentials=credentials, _use_grpc=False - ) - - with mock.patch("google.cloud.logging.client.setup_logging") as mocked: - client.setup_logging() - - self.assertEqual(len(mocked.mock_calls), 1) - _, args, kwargs = mocked.mock_calls[0] - - (handler,) = args - self.assertIsInstance(handler, CloudLoggingHandler) - - handler.transport.worker.stop() - - expected_kwargs = { - "excluded_loggers": ("google.cloud", "google.auth", "google_auth_httplib2"), - "log_level": 20, - } - self.assertEqual(kwargs, expected_kwargs) - - def test_setup_logging_w_extra_kwargs(self): - import io - from google.cloud.logging.handlers import CloudLoggingHandler - from google.cloud.logging.resource import Resource - - name = "test-logger" - resource = Resource("resource_type", {"resource_label": "value"}) - labels = {"handler_label": "value"} - stream = io.BytesIO() - - credentials = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=credentials, _use_grpc=False - ) - - with mock.patch("google.cloud.logging.client.setup_logging") as mocked: - client.setup_logging( - name=name, resource=resource, labels=labels, stream=stream - ) - - self.assertEqual(len(mocked.mock_calls), 1) - _, args, kwargs = mocked.mock_calls[0] - - (handler,) = args - self.assertIsInstance(handler, CloudLoggingHandler) - self.assertEqual(handler.name, name) - self.assertEqual(handler.resource, resource) - self.assertEqual(handler.labels, labels) - - handler.transport.worker.stop() - - expected_kwargs = { - "excluded_loggers": ("google.cloud", "google.auth", "google_auth_httplib2"), - "log_level": 20, - } - self.assertEqual(kwargs, expected_kwargs) - - -class _Connection(object): - - _called_with = None - - def __init__(self, *responses): - self._responses = responses - - def api_request(self, **kw): - self._called_with = kw - response, self._responses = self._responses[0], self._responses[1:] - return response diff --git a/logging/tests/unit/test_entries.py b/logging/tests/unit/test_entries.py deleted file mode 100644 index 3aad7fbb130c..000000000000 --- a/logging/tests/unit/test_entries.py +++ /dev/null @@ -1,735 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import mock - - -class Test_logger_name_from_path(unittest.TestCase): - def _call_fut(self, path): - from google.cloud.logging.entries import logger_name_from_path - - return logger_name_from_path(path) - - def test_w_simple_name(self): - LOGGER_NAME = "LOGGER_NAME" - PROJECT = "my-project-1234" - PATH = "projects/%s/logs/%s" % (PROJECT, LOGGER_NAME) - logger_name = self._call_fut(PATH) - self.assertEqual(logger_name, LOGGER_NAME) - - def test_w_name_w_all_extras(self): - LOGGER_NAME = "LOGGER_NAME-part.one~part.two%part-three" - PROJECT = "my-project-1234" - PATH = "projects/%s/logs/%s" % (PROJECT, LOGGER_NAME) - logger_name = self._call_fut(PATH) - self.assertEqual(logger_name, LOGGER_NAME) - - -class Test__int_or_none(unittest.TestCase): - def _call_fut(self, value): - from google.cloud.logging.entries import _int_or_none - - return _int_or_none(value) - - def test_w_none(self): - self.assertIsNone(self._call_fut(None)) - - def test_w_int(self): - self.assertEqual(self._call_fut(123), 123) - - def test_w_str(self): - self.assertEqual(self._call_fut("123"), 123) - - -class TestLogEntry(unittest.TestCase): - - PROJECT = "PROJECT" - LOGGER_NAME = "LOGGER_NAME" - - @staticmethod - def _get_target_class(): - from google.cloud.logging.entries import LogEntry - - return LogEntry - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - from google.cloud.logging.entries import _GLOBAL_RESOURCE - - entry = self._make_one() - - self.assertIsNone(entry.log_name) - self.assertIsNone(entry.logger) - self.assertIsNone(entry.labels) - self.assertIsNone(entry.insert_id) - self.assertIsNone(entry.severity) - self.assertIsNone(entry.http_request) - self.assertIsNone(entry.timestamp) - self.assertIs(entry.resource, _GLOBAL_RESOURCE) - self.assertIsNone(entry.trace) - self.assertIsNone(entry.span_id) - self.assertIsNone(entry.trace_sampled) - self.assertIsNone(entry.source_location) - self.assertIsNone(entry.operation) - self.assertIsNone(entry.payload) - - def test_ctor_explicit(self): - import datetime - from google.cloud.logging.resource import Resource - - LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) - IID = "IID" - TIMESTAMP = datetime.datetime.now() - LABELS = {"foo": "bar", "baz": "qux"} - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - resource = Resource(type="global", labels={}) - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - FILE = "my_file.py" - LINE_NO = 123 - FUNCTION = "my_function" - SOURCE_LOCATION = {"file": FILE, "line": LINE_NO, "function": FUNCTION} - OP_ID = "OP_ID" - PRODUCER = "PRODUCER" - OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} - logger = _Logger(self.LOGGER_NAME, self.PROJECT) - - entry = self._make_one( - log_name=LOG_NAME, - logger=logger, - insert_id=IID, - timestamp=TIMESTAMP, - labels=LABELS, - severity=SEVERITY, - http_request=REQUEST, - resource=resource, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - source_location=SOURCE_LOCATION, - operation=OPERATION, - ) - - self.assertEqual(entry.log_name, LOG_NAME) - self.assertIs(entry.logger, logger) - self.assertEqual(entry.insert_id, IID) - self.assertEqual(entry.timestamp, TIMESTAMP) - self.assertEqual(entry.labels, LABELS) - self.assertEqual(entry.severity, SEVERITY) - 
self.assertEqual(entry.http_request["requestMethod"], METHOD) - self.assertEqual(entry.http_request["requestUrl"], URI) - self.assertEqual(entry.http_request["status"], STATUS) - self.assertEqual(entry.resource, resource) - self.assertEqual(entry.trace, TRACE) - self.assertEqual(entry.span_id, SPANID) - self.assertTrue(entry.trace_sampled) - - source_location = entry.source_location - self.assertEqual(source_location["file"], FILE) - self.assertEqual(source_location["line"], LINE_NO) - self.assertEqual(source_location["function"], FUNCTION) - - self.assertEqual(entry.operation, OPERATION) - self.assertIsNone(entry.payload) - - def test_from_api_repr_missing_data_no_loggers(self): - client = _Client(self.PROJECT) - LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) - API_REPR = {"logName": LOG_NAME} - klass = self._get_target_class() - - entry = klass.from_api_repr(API_REPR, client) - - self.assertEqual(entry.log_name, LOG_NAME) - logger = entry.logger - self.assertIsInstance(logger, _Logger) - self.assertEqual(logger.name, self.LOGGER_NAME) - self.assertIsNone(entry.insert_id) - self.assertIsNone(entry.timestamp) - self.assertIsNone(entry.severity) - self.assertIsNone(entry.http_request) - self.assertIsNone(entry.trace) - self.assertIsNone(entry.span_id) - self.assertIsNone(entry.trace_sampled) - self.assertIsNone(entry.source_location) - self.assertIsNone(entry.operation) - self.assertIs(logger.client, client) - self.assertIsNone(entry.payload) - - def test_from_api_repr_w_loggers_no_logger_match(self): - from datetime import datetime - from google.cloud._helpers import UTC - from google.cloud.logging.resource import Resource - - klass = self._get_target_class() - client = _Client(self.PROJECT) - SEVERITY = "CRITICAL" - IID = "IID" - NOW = datetime.utcnow().replace(tzinfo=UTC) - TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) - LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) - LABELS = {"foo": "bar", "baz": "qux"} - METHOD = "POST" - URI = "https://api.example.com/endpoint" - RESOURCE = Resource( - type="gae_app", - labels={ - "type": "gae_app", - "labels": {"module_id": "default", "version": "test"}, - }, - ) - STATUS = "500" - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - FILE = "my_file.py" - LINE_NO = 123 - FUNCTION = "my_function" - SOURCE_LOCATION = {"file": FILE, "line": str(LINE_NO), "function": FUNCTION} - OP_ID = "OP_ID" - PRODUCER = "PRODUCER" - OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} - API_REPR = { - "logName": LOG_NAME, - "insertId": IID, - "timestamp": TIMESTAMP, - "labels": LABELS, - "severity": SEVERITY, - "httpRequest": { - "requestMethod": METHOD, - "requestUrl": URI, - "status": STATUS, - }, - "resource": RESOURCE._to_dict(), - "trace": TRACE, - "spanId": SPANID, - "traceSampled": True, - "sourceLocation": SOURCE_LOCATION, - "operation": OPERATION, - } - loggers = {} - - entry = klass.from_api_repr(API_REPR, client, loggers=loggers) - - self.assertEqual(entry.log_name, LOG_NAME) - logger = entry.logger - self.assertIsInstance(logger, _Logger) - self.assertEqual(logger.name, self.LOGGER_NAME) - self.assertEqual(entry.insert_id, IID) - self.assertEqual(entry.timestamp, NOW) - self.assertIsNone(entry.received_timestamp) - self.assertEqual(entry.labels, LABELS) - self.assertEqual(entry.severity, SEVERITY) - self.assertEqual(entry.http_request["requestMethod"], METHOD) - self.assertEqual(entry.http_request["requestUrl"], URI) - 
self.assertEqual(entry.http_request["status"], STATUS) - self.assertIs(logger.client, client) - self.assertEqual(logger.name, self.LOGGER_NAME) - self.assertEqual(loggers, {LOG_NAME: logger}) - self.assertEqual(entry.resource, RESOURCE) - self.assertEqual(entry.trace, TRACE) - self.assertEqual(entry.span_id, SPANID) - self.assertTrue(entry.trace_sampled) - - source_location = entry.source_location - self.assertEqual(source_location["file"], FILE) - self.assertEqual(source_location["line"], LINE_NO) - self.assertEqual(source_location["function"], FUNCTION) - - self.assertEqual(entry.operation, OPERATION) - self.assertIsNone(entry.payload) - - def test_from_api_repr_w_loggers_w_logger_match(self): - from datetime import datetime - from datetime import timedelta - from google.cloud._helpers import UTC - - client = _Client(self.PROJECT) - IID = "IID" - NOW = datetime.utcnow().replace(tzinfo=UTC) - LATER = NOW + timedelta(seconds=1) - TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) - RECEIVED = _datetime_to_rfc3339_w_nanos(LATER) - LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) - LABELS = {"foo": "bar", "baz": "qux"} - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - FILE = "my_file.py" - LINE_NO = 123 - FUNCTION = "my_function" - SOURCE_LOCATION = {"file": FILE, "line": str(LINE_NO), "function": FUNCTION} - OP_ID = "OP_ID" - PRODUCER = "PRODUCER" - OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} - API_REPR = { - "logName": LOG_NAME, - "insertId": IID, - "timestamp": TIMESTAMP, - "receiveTimestamp": RECEIVED, - "labels": LABELS, - "trace": TRACE, - "spanId": SPANID, - "traceSampled": True, - "sourceLocation": SOURCE_LOCATION, - "operation": OPERATION, - } - LOGGER = object() - loggers = {LOG_NAME: LOGGER} - klass = self._get_target_class() - - entry = klass.from_api_repr(API_REPR, client, loggers=loggers) - - self.assertEqual(entry.log_name, LOG_NAME) - self.assertIs(entry.logger, LOGGER) - self.assertEqual(entry.insert_id, IID) - self.assertEqual(entry.timestamp, NOW) - self.assertEqual(entry.received_timestamp, LATER) - self.assertEqual(entry.labels, LABELS) - self.assertEqual(entry.trace, TRACE) - self.assertEqual(entry.span_id, SPANID) - self.assertTrue(entry.trace_sampled) - - source_location = entry.source_location - self.assertEqual(source_location["file"], FILE) - self.assertEqual(source_location["line"], LINE_NO) - self.assertEqual(source_location["function"], FUNCTION) - - self.assertEqual(entry.operation, OPERATION) - self.assertIsNone(entry.payload) - - def test_to_api_repr_w_source_location_no_line(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE - - LOG_NAME = "test.log" - FILE = "my_file.py" - FUNCTION = "my_function" - SOURCE_LOCATION = {"file": FILE, "function": FUNCTION} - entry = self._make_one(log_name=LOG_NAME, source_location=SOURCE_LOCATION) - expected = { - "logName": LOG_NAME, - "resource": _GLOBAL_RESOURCE._to_dict(), - "sourceLocation": {"file": FILE, "line": "0", "function": FUNCTION}, - } - self.assertEqual(entry.to_api_repr(), expected) - - def test_to_api_repr_explicit(self): - import datetime - from google.cloud.logging.resource import Resource - from google.cloud._helpers import _datetime_to_rfc3339 - - LOG_NAME = "test.log" - LABELS = {"foo": "bar", "baz": "qux"} - IID = "IID" - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - TIMESTAMP = 
datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - RESOURCE = Resource( - type="gae_app", labels={"module_id": "default", "version_id": "test"} - ) - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - FILE = "my_file.py" - LINE = 123 - FUNCTION = "my_function" - SOURCE_LOCATION = {"file": FILE, "line": LINE, "function": FUNCTION} - OP_ID = "OP_ID" - PRODUCER = "PRODUCER" - OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} - expected = { - "logName": LOG_NAME, - "labels": LABELS, - "insertId": IID, - "severity": SEVERITY, - "httpRequest": REQUEST, - "timestamp": _datetime_to_rfc3339(TIMESTAMP), - "resource": RESOURCE._to_dict(), - "trace": TRACE, - "spanId": SPANID, - "traceSampled": True, - "sourceLocation": {"file": FILE, "line": str(LINE), "function": FUNCTION}, - "operation": OPERATION, - } - entry = self._make_one( - log_name=LOG_NAME, - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - source_location=SOURCE_LOCATION, - operation=OPERATION, - ) - - self.assertEqual(entry.to_api_repr(), expected) - - -class TestTextEntry(unittest.TestCase): - - PROJECT = "PROJECT" - LOGGER_NAME = "LOGGER_NAME" - - @staticmethod - def _get_target_class(): - from google.cloud.logging.entries import TextEntry - - return TextEntry - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_to_api_repr_defaults(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE - - LOG_NAME = "test.log" - TEXT = "TESTING" - entry = self._make_one(log_name=LOG_NAME, payload=TEXT) - expected = { - "logName": LOG_NAME, - "textPayload": TEXT, - "resource": _GLOBAL_RESOURCE._to_dict(), - } - self.assertEqual(entry.to_api_repr(), expected) - - def test_to_api_repr_explicit(self): - import datetime - from google.cloud.logging.resource import Resource - from google.cloud._helpers import _datetime_to_rfc3339 - - LOG_NAME = "test.log" - TEXT = "This is the entry text" - LABELS = {"foo": "bar", "baz": "qux"} - IID = "IID" - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - RESOURCE = Resource( - type="gae_app", labels={"module_id": "default", "version_id": "test"} - ) - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - FILE = "my_file.py" - LINE = 123 - FUNCTION = "my_function" - SOURCE_LOCATION = {"file": FILE, "line": LINE, "function": FUNCTION} - OP_ID = "OP_ID" - PRODUCER = "PRODUCER" - OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} - expected = { - "logName": LOG_NAME, - "textPayload": TEXT, - "labels": LABELS, - "insertId": IID, - "severity": SEVERITY, - "httpRequest": REQUEST, - "timestamp": _datetime_to_rfc3339(TIMESTAMP), - "resource": RESOURCE._to_dict(), - "trace": TRACE, - "spanId": SPANID, - "traceSampled": True, - "sourceLocation": {"file": FILE, "line": str(LINE), "function": FUNCTION}, - "operation": OPERATION, - } - entry = self._make_one( - log_name=LOG_NAME, - payload=TEXT, - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - source_location=SOURCE_LOCATION, - operation=OPERATION, - ) - - 
self.assertEqual(entry.to_api_repr(), expected) - - -class TestStructEntry(unittest.TestCase): - - PROJECT = "PROJECT" - LOGGER_NAME = "LOGGER_NAME" - - @staticmethod - def _get_target_class(): - from google.cloud.logging.entries import StructEntry - - return StructEntry - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_to_api_repr_defaults(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE - - LOG_NAME = "test.log" - JSON_PAYLOAD = {"key": "value"} - entry = self._make_one(log_name=LOG_NAME, payload=JSON_PAYLOAD) - expected = { - "logName": LOG_NAME, - "jsonPayload": JSON_PAYLOAD, - "resource": _GLOBAL_RESOURCE._to_dict(), - } - self.assertEqual(entry.to_api_repr(), expected) - - def test_to_api_repr_explicit(self): - import datetime - from google.cloud.logging.resource import Resource - from google.cloud._helpers import _datetime_to_rfc3339 - - LOG_NAME = "test.log" - JSON_PAYLOAD = {"key": "value"} - LABELS = {"foo": "bar", "baz": "qux"} - IID = "IID" - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - RESOURCE = Resource( - type="gae_app", labels={"module_id": "default", "version_id": "test"} - ) - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - FILE = "my_file.py" - LINE = 123 - FUNCTION = "my_function" - SOURCE_LOCATION = {"file": FILE, "line": LINE, "function": FUNCTION} - OP_ID = "OP_ID" - PRODUCER = "PRODUCER" - OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} - expected = { - "logName": LOG_NAME, - "jsonPayload": JSON_PAYLOAD, - "labels": LABELS, - "insertId": IID, - "severity": SEVERITY, - "httpRequest": REQUEST, - "timestamp": _datetime_to_rfc3339(TIMESTAMP), - "resource": RESOURCE._to_dict(), - "trace": TRACE, - "spanId": SPANID, - "traceSampled": True, - "sourceLocation": {"file": FILE, "line": str(LINE), "function": FUNCTION}, - "operation": OPERATION, - } - entry = self._make_one( - log_name=LOG_NAME, - payload=JSON_PAYLOAD, - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - source_location=SOURCE_LOCATION, - operation=OPERATION, - ) - - self.assertEqual(entry.to_api_repr(), expected) - - -class TestProtobufEntry(unittest.TestCase): - - PROJECT = "PROJECT" - LOGGER_NAME = "LOGGER_NAME" - - @staticmethod - def _get_target_class(): - from google.cloud.logging.entries import ProtobufEntry - - return ProtobufEntry - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_constructor_basic(self): - payload = {"foo": "bar"} - - pb_entry = self._make_one(payload=payload, logger=mock.sentinel.logger) - - self.assertIs(pb_entry.payload, payload) - self.assertIsNone(pb_entry.payload_pb) - self.assertIs(pb_entry.payload_json, payload) - self.assertIs(pb_entry.logger, mock.sentinel.logger) - self.assertIsNone(pb_entry.insert_id) - self.assertIsNone(pb_entry.timestamp) - self.assertIsNone(pb_entry.labels) - self.assertIsNone(pb_entry.severity) - self.assertIsNone(pb_entry.http_request) - self.assertIsNone(pb_entry.trace) - self.assertIsNone(pb_entry.span_id) - self.assertIsNone(pb_entry.trace_sampled) - self.assertIsNone(pb_entry.source_location) - - def test_constructor_with_any(self): - from 
google.protobuf.any_pb2 import Any - - payload = Any() - - pb_entry = self._make_one(payload=payload, logger=mock.sentinel.logger) - - self.assertIs(pb_entry.payload, payload) - self.assertIs(pb_entry.payload_pb, payload) - self.assertIsNone(pb_entry.payload_json) - self.assertIs(pb_entry.logger, mock.sentinel.logger) - self.assertIsNone(pb_entry.insert_id) - self.assertIsNone(pb_entry.timestamp) - self.assertIsNone(pb_entry.labels) - self.assertIsNone(pb_entry.severity) - self.assertIsNone(pb_entry.http_request) - self.assertIsNone(pb_entry.trace) - self.assertIsNone(pb_entry.span_id) - self.assertIsNone(pb_entry.trace_sampled) - self.assertIsNone(pb_entry.source_location) - - def test_parse_message(self): - import json - from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct, Value - - message = Struct(fields={"foo": Value(bool_value=False)}) - with_true = Struct(fields={"foo": Value(bool_value=True)}) - payload = json.loads(MessageToJson(with_true)) - entry = self._make_one(payload=payload, logger=mock.sentinel.logger) - - entry.parse_message(message) - - self.assertTrue(message.fields["foo"]) - - def test_to_api_repr_proto_defaults(self): - from google.protobuf.json_format import MessageToDict - from google.cloud.logging.logger import _GLOBAL_RESOURCE - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - LOG_NAME = "test.log" - message = Struct(fields={"foo": Value(bool_value=True)}) - - entry = self._make_one(log_name=LOG_NAME, payload=message) - expected = { - "logName": LOG_NAME, - "protoPayload": MessageToDict(message), - "resource": _GLOBAL_RESOURCE._to_dict(), - } - self.assertEqual(entry.to_api_repr(), expected) - - def test_to_api_repr_proto_explicit(self): - import datetime - from google.protobuf.json_format import MessageToDict - from google.cloud.logging.resource import Resource - from google.cloud._helpers import _datetime_to_rfc3339 - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - LOG_NAME = "test.log" - message = Struct(fields={"foo": Value(bool_value=True)}) - LABELS = {"foo": "bar", "baz": "qux"} - IID = "IID" - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - RESOURCE = Resource( - type="gae_app", labels={"module_id": "default", "version_id": "test"} - ) - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - FILE = "my_file.py" - LINE = 123 - FUNCTION = "my_function" - SOURCE_LOCATION = {"file": FILE, "line": LINE, "function": FUNCTION} - OP_ID = "OP_ID" - PRODUCER = "PRODUCER" - OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} - expected = { - "logName": LOG_NAME, - "protoPayload": MessageToDict(message), - "labels": LABELS, - "insertId": IID, - "severity": SEVERITY, - "httpRequest": REQUEST, - "timestamp": _datetime_to_rfc3339(TIMESTAMP), - "resource": RESOURCE._to_dict(), - "trace": TRACE, - "spanId": SPANID, - "traceSampled": True, - "sourceLocation": {"file": FILE, "line": str(LINE), "function": FUNCTION}, - "operation": OPERATION, - } - - entry = self._make_one( - log_name=LOG_NAME, - payload=message, - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - 
source_location=SOURCE_LOCATION, - operation=OPERATION, - ) - - self.assertEqual(entry.to_api_repr(), expected) - - -def _datetime_to_rfc3339_w_nanos(value): - from google.cloud._helpers import _RFC3339_NO_FRACTION - - no_fraction = value.strftime(_RFC3339_NO_FRACTION) - return "%s.%09dZ" % (no_fraction, value.microsecond * 1000) - - -class _Logger(object): - def __init__(self, name, client): - self.name = name - self.client = client - - -class _Client(object): - def __init__(self, project): - self.project = project - - def logger(self, name): - return _Logger(name, self) diff --git a/logging/tests/unit/test_logger.py b/logging/tests/unit/test_logger.py deleted file mode 100644 index 5bf6a706815f..000000000000 --- a/logging/tests/unit/test_logger.py +++ /dev/null @@ -1,1140 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class TestLogger(unittest.TestCase): - - PROJECT = "test-project" - LOGGER_NAME = "logger-name" - - @staticmethod - def _get_target_class(): - from google.cloud.logging.logger import Logger - - return Logger - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - conn = object() - client = _Client(self.PROJECT, conn) - logger = self._make_one(self.LOGGER_NAME, client=client) - self.assertEqual(logger.name, self.LOGGER_NAME) - self.assertIs(logger.client, client) - self.assertEqual(logger.project, self.PROJECT) - self.assertEqual( - logger.full_name, "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) - ) - self.assertEqual( - logger.path, "/projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) - ) - self.assertIsNone(logger.labels) - - def test_ctor_explicit(self): - LABELS = {"foo": "bar", "baz": "qux"} - conn = object() - client = _Client(self.PROJECT, conn) - logger = self._make_one(self.LOGGER_NAME, client=client, labels=LABELS) - self.assertEqual(logger.name, self.LOGGER_NAME) - self.assertIs(logger.client, client) - self.assertEqual(logger.project, self.PROJECT) - self.assertEqual( - logger.full_name, "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) - ) - self.assertEqual( - logger.path, "/projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) - ) - self.assertEqual(logger.labels, LABELS) - - def test_batch_w_bound_client(self): - from google.cloud.logging.logger import Batch - - conn = object() - client = _Client(self.PROJECT, conn) - logger = self._make_one(self.LOGGER_NAME, client=client) - batch = logger.batch() - self.assertIsInstance(batch, Batch) - self.assertIs(batch.logger, logger) - self.assertIs(batch.client, client) - - def test_batch_w_alternate_client(self): - from google.cloud.logging.logger import Batch - - conn1 = object() - conn2 = object() - client1 = _Client(self.PROJECT, conn1) - client2 = _Client(self.PROJECT, conn2) - logger = self._make_one(self.LOGGER_NAME, 
client=client1) - batch = logger.batch(client2) - self.assertIsInstance(batch, Batch) - self.assertIs(batch.logger, logger) - self.assertIs(batch.client, client2) - - def test_log_empty_defaults_w_default_labels(self): - DEFAULT_LABELS = {"foo": "spam"} - ENTRIES = [ - { - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - "resource": {"type": "global", "labels": {}}, - "labels": DEFAULT_LABELS, - } - ] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) - - logger.log_empty() - - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - - def test_log_empty_w_explicit(self): - import datetime - from google.cloud.logging.resource import Resource - - ALT_LOG_NAME = "projects/foo/logs/alt.log.name" - DEFAULT_LABELS = {"foo": "spam"} - LABELS = {"foo": "bar", "baz": "qux"} - IID = "IID" - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - RESOURCE = Resource( - type="gae_app", labels={"module_id": "default", "version_id": "test"} - ) - ENTRIES = [ - { - "logName": ALT_LOG_NAME, - "labels": LABELS, - "insertId": IID, - "severity": SEVERITY, - "httpRequest": REQUEST, - "timestamp": "2016-12-31T00:01:02.999999Z", - "resource": RESOURCE._to_dict(), - "trace": TRACE, - "spanId": SPANID, - "traceSampled": True, - } - ] - client1 = _Client(self.PROJECT) - client2 = _Client(self.PROJECT) - api = client2.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) - - logger.log_empty( - log_name=ALT_LOG_NAME, - client=client2, - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - ) - - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - - def test_log_text_defaults(self): - TEXT = "TEXT" - ENTRIES = [ - { - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - "textPayload": TEXT, - "resource": {"type": "global", "labels": {}}, - } - ] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client) - - logger.log_text(TEXT) - - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - - def test_log_text_w_unicode_and_default_labels(self): - TEXT = u"TEXT" - DEFAULT_LABELS = {"foo": "spam"} - ENTRIES = [ - { - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - "textPayload": TEXT, - "resource": {"type": "global", "labels": {}}, - "labels": DEFAULT_LABELS, - } - ] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) - - logger.log_text(TEXT) - - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - - def test_log_text_explicit(self): - import datetime - from google.cloud.logging.resource import Resource - - ALT_LOG_NAME = "projects/foo/logs/alt.log.name" - TEXT = "TEXT" - DEFAULT_LABELS = {"foo": "spam"} - LABELS = {"foo": "bar", "baz": "qux"} - IID = "IID" - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = 
"https://api.example.com/endpoint" - STATUS = "500" - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - RESOURCE = Resource( - type="gae_app", labels={"module_id": "default", "version_id": "test"} - ) - ENTRIES = [ - { - "logName": ALT_LOG_NAME, - "textPayload": TEXT, - "labels": LABELS, - "insertId": IID, - "severity": SEVERITY, - "httpRequest": REQUEST, - "timestamp": "2016-12-31T00:01:02.999999Z", - "resource": RESOURCE._to_dict(), - "trace": TRACE, - "spanId": SPANID, - "traceSampled": True, - } - ] - client1 = _Client(self.PROJECT) - client2 = _Client(self.PROJECT) - api = client2.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) - - logger.log_text( - TEXT, - log_name=ALT_LOG_NAME, - client=client2, - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - ) - - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - - def test_log_struct_defaults(self): - STRUCT = {"message": "MESSAGE", "weather": "cloudy"} - ENTRIES = [ - { - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - "jsonPayload": STRUCT, - "resource": {"type": "global", "labels": {}}, - } - ] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client) - - logger.log_struct(STRUCT) - - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - - def test_log_struct_w_default_labels(self): - STRUCT = {"message": "MESSAGE", "weather": "cloudy"} - DEFAULT_LABELS = {"foo": "spam"} - ENTRIES = [ - { - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - "jsonPayload": STRUCT, - "resource": {"type": "global", "labels": {}}, - "labels": DEFAULT_LABELS, - } - ] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) - - logger.log_struct(STRUCT) - - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - - def test_log_struct_w_explicit(self): - import datetime - from google.cloud.logging.resource import Resource - - ALT_LOG_NAME = "projects/foo/logs/alt.log.name" - STRUCT = {"message": "MESSAGE", "weather": "cloudy"} - DEFAULT_LABELS = {"foo": "spam"} - LABELS = {"foo": "bar", "baz": "qux"} - IID = "IID" - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - RESOURCE = Resource( - type="gae_app", labels={"module_id": "default", "version_id": "test"} - ) - ENTRIES = [ - { - "logName": ALT_LOG_NAME, - "jsonPayload": STRUCT, - "labels": LABELS, - "insertId": IID, - "severity": SEVERITY, - "httpRequest": REQUEST, - "timestamp": "2016-12-31T00:01:02.999999Z", - "resource": RESOURCE._to_dict(), - "trace": TRACE, - "spanId": SPANID, - "traceSampled": True, - } - ] - client1 = _Client(self.PROJECT) - client2 = _Client(self.PROJECT) - api = client2.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client1, 
labels=DEFAULT_LABELS) - - logger.log_struct( - STRUCT, - log_name=ALT_LOG_NAME, - client=client2, - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - ) - - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - - def test_log_proto_defaults(self): - import json - from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct, Value - - message = Struct(fields={"foo": Value(bool_value=True)}) - ENTRIES = [ - { - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - "protoPayload": json.loads(MessageToJson(message)), - "resource": {"type": "global", "labels": {}}, - } - ] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client) - - logger.log_proto(message) - - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - - def test_log_proto_w_default_labels(self): - import json - from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct, Value - - message = Struct(fields={"foo": Value(bool_value=True)}) - DEFAULT_LABELS = {"foo": "spam"} - ENTRIES = [ - { - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - "protoPayload": json.loads(MessageToJson(message)), - "resource": {"type": "global", "labels": {}}, - "labels": DEFAULT_LABELS, - } - ] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) - - logger.log_proto(message) - - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - - def test_log_proto_w_explicit(self): - import json - import datetime - from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - from google.cloud.logging.resource import Resource - - message = Struct(fields={"foo": Value(bool_value=True)}) - ALT_LOG_NAME = "projects/foo/logs/alt.log.name" - DEFAULT_LABELS = {"foo": "spam"} - LABELS = {"foo": "bar", "baz": "qux"} - IID = "IID" - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - RESOURCE = Resource( - type="gae_app", labels={"module_id": "default", "version_id": "test"} - ) - ENTRIES = [ - { - "logName": ALT_LOG_NAME, - "protoPayload": json.loads(MessageToJson(message)), - "labels": LABELS, - "insertId": IID, - "severity": SEVERITY, - "httpRequest": REQUEST, - "timestamp": "2016-12-31T00:01:02.999999Z", - "resource": RESOURCE._to_dict(), - "trace": TRACE, - "spanId": SPANID, - "traceSampled": True, - } - ] - client1 = _Client(self.PROJECT) - client2 = _Client(self.PROJECT) - api = client2.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) - - logger.log_proto( - message, - log_name=ALT_LOG_NAME, - client=client2, - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - ) - - self.assertEqual(api._write_entries_called_with, 
(ENTRIES, None, None, None)) - - def test_delete_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client) - - logger.delete() - - self.assertEqual( - api._logger_delete_called_with, (self.PROJECT, self.LOGGER_NAME) - ) - - def test_delete_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client1) - - logger.delete(client=client2) - - self.assertEqual( - api._logger_delete_called_with, (self.PROJECT, self.LOGGER_NAME) - ) - - def test_list_entries_defaults(self): - import six - from google.cloud.logging.client import Client - - TOKEN = "TOKEN" - - client = Client( - project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False - ) - returned = {"nextPageToken": TOKEN} - client._connection = _Connection(returned) - - logger = self._make_one(self.LOGGER_NAME, client=client) - - iterator = logger.list_entries() - page = six.next(iterator.pages) - entries = list(page) - token = iterator.next_page_token - - self.assertEqual(len(entries), 0) - self.assertEqual(token, TOKEN) - called_with = client._connection._called_with - FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) - self.assertEqual( - called_with, - { - "method": "POST", - "path": "/entries:list", - "data": {"filter": FILTER, "projectIds": [self.PROJECT]}, - }, - ) - - def test_list_entries_explicit(self): - from google.cloud.logging import DESCENDING - from google.cloud.logging.client import Client - - PROJECT1 = "PROJECT1" - PROJECT2 = "PROJECT2" - FILTER = "resource.type:global" - TOKEN = "TOKEN" - PAGE_SIZE = 42 - client = Client( - project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False - ) - client._connection = _Connection({}) - logger = self._make_one(self.LOGGER_NAME, client=client) - iterator = logger.list_entries( - projects=[PROJECT1, PROJECT2], - filter_=FILTER, - order_by=DESCENDING, - page_size=PAGE_SIZE, - page_token=TOKEN, - ) - entries = list(iterator) - token = iterator.next_page_token - - self.assertEqual(len(entries), 0) - self.assertIsNone(token) - # self.assertEqual(client._listed, LISTED) - called_with = client._connection._called_with - combined_filter = "%s AND logName=projects/%s/logs/%s" % ( - FILTER, - self.PROJECT, - self.LOGGER_NAME, - ) - self.assertEqual( - called_with, - { - "method": "POST", - "path": "/entries:list", - "data": { - "filter": combined_filter, - "orderBy": DESCENDING, - "pageSize": PAGE_SIZE, - "pageToken": TOKEN, - "projectIds": [PROJECT1, PROJECT2], - }, - }, - ) - - -class TestBatch(unittest.TestCase): - - PROJECT = "test-project" - - @staticmethod - def _get_target_class(): - from google.cloud.logging.logger import Batch - - return Batch - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_ctor_defaults(self): - logger = _Logger() - client = _Client(project=self.PROJECT) - batch = self._make_one(logger, client) - self.assertIs(batch.logger, logger) - self.assertIs(batch.client, client) - self.assertEqual(len(batch.entries), 0) - - def test_log_empty_defaults(self): - from google.cloud.logging.entries import LogEntry - - ENTRY = LogEntry() - client = _Client(project=self.PROJECT, connection=_make_credentials()) - logger = _Logger() - batch = self._make_one(logger, client=client) - batch.log_empty() - 
self.assertEqual(batch.entries, [ENTRY]) - - def test_log_empty_explicit(self): - import datetime - from google.cloud.logging.resource import Resource - from google.cloud.logging.entries import LogEntry - - LABELS = {"foo": "bar", "baz": "qux"} - IID = "IID" - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - RESOURCE = Resource( - type="gae_app", labels={"module_id": "default", "version_id": "test"} - ) - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - ENTRY = LogEntry( - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - ) - - client = _Client(project=self.PROJECT, connection=_make_credentials()) - logger = _Logger() - batch = self._make_one(logger, client=client) - batch.log_empty( - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - ) - self.assertEqual(batch.entries, [ENTRY]) - - def test_log_text_defaults(self): - from google.cloud.logging.entries import _GLOBAL_RESOURCE - from google.cloud.logging.entries import TextEntry - - TEXT = "This is the entry text" - ENTRY = TextEntry(payload=TEXT, resource=_GLOBAL_RESOURCE) - client = _Client(project=self.PROJECT, connection=_make_credentials()) - logger = _Logger() - batch = self._make_one(logger, client=client) - batch.log_text(TEXT) - self.assertEqual(batch.entries, [ENTRY]) - - def test_log_text_explicit(self): - import datetime - from google.cloud.logging.resource import Resource - from google.cloud.logging.entries import TextEntry - - TEXT = "This is the entry text" - LABELS = {"foo": "bar", "baz": "qux"} - IID = "IID" - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - RESOURCE = Resource( - type="gae_app", labels={"module_id": "default", "version_id": "test"} - ) - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - ENTRY = TextEntry( - payload=TEXT, - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - ) - - client = _Client(project=self.PROJECT, connection=_make_credentials()) - logger = _Logger() - batch = self._make_one(logger, client=client) - batch.log_text( - TEXT, - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - ) - self.assertEqual(batch.entries, [ENTRY]) - - def test_log_struct_defaults(self): - from google.cloud.logging.entries import _GLOBAL_RESOURCE - from google.cloud.logging.entries import StructEntry - - STRUCT = {"message": "Message text", "weather": "partly cloudy"} - ENTRY = StructEntry(payload=STRUCT, resource=_GLOBAL_RESOURCE) - client = _Client(project=self.PROJECT, connection=_make_credentials()) - logger = _Logger() - batch = self._make_one(logger, client=client) - batch.log_struct(STRUCT) - self.assertEqual(batch.entries, [ENTRY]) - - def test_log_struct_explicit(self): - 
import datetime - from google.cloud.logging.resource import Resource - from google.cloud.logging.entries import StructEntry - - STRUCT = {"message": "Message text", "weather": "partly cloudy"} - LABELS = {"foo": "bar", "baz": "qux"} - IID = "IID" - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - RESOURCE = Resource( - type="gae_app", labels={"module_id": "default", "version_id": "test"} - ) - ENTRY = StructEntry( - payload=STRUCT, - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - ) - - client = _Client(project=self.PROJECT, connection=_make_credentials()) - logger = _Logger() - batch = self._make_one(logger, client=client) - batch.log_struct( - STRUCT, - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - ) - self.assertEqual(batch.entries, [ENTRY]) - - def test_log_proto_defaults(self): - from google.cloud.logging.entries import _GLOBAL_RESOURCE - from google.cloud.logging.entries import ProtobufEntry - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - message = Struct(fields={"foo": Value(bool_value=True)}) - ENTRY = ProtobufEntry(payload=message, resource=_GLOBAL_RESOURCE) - client = _Client(project=self.PROJECT, connection=_make_credentials()) - logger = _Logger() - batch = self._make_one(logger, client=client) - batch.log_proto(message) - self.assertEqual(batch.entries, [ENTRY]) - - def test_log_proto_explicit(self): - import datetime - from google.cloud.logging.resource import Resource - from google.cloud.logging.entries import ProtobufEntry - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - message = Struct(fields={"foo": Value(bool_value=True)}) - LABELS = {"foo": "bar", "baz": "qux"} - IID = "IID" - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - TRACE = "12345678-1234-5678-1234-567812345678" - SPANID = "000000000000004a" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - RESOURCE = Resource( - type="gae_app", labels={"module_id": "default", "version_id": "test"} - ) - ENTRY = ProtobufEntry( - payload=message, - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - ) - client = _Client(project=self.PROJECT, connection=_make_credentials()) - logger = _Logger() - batch = self._make_one(logger, client=client) - batch.log_proto( - message, - labels=LABELS, - insert_id=IID, - severity=SEVERITY, - http_request=REQUEST, - timestamp=TIMESTAMP, - resource=RESOURCE, - trace=TRACE, - span_id=SPANID, - trace_sampled=True, - ) - self.assertEqual(batch.entries, [ENTRY]) - - def test_commit_w_unknown_entry_type(self): - from google.cloud.logging.entries import _GLOBAL_RESOURCE - from google.cloud.logging.entries import LogEntry - - logger = _Logger() - client = _Client(project=self.PROJECT, connection=_make_credentials()) - 
api = client.logging_api = _DummyLoggingAPI() - batch = self._make_one(logger, client) - batch.entries.append(LogEntry(severity="blah")) - ENTRY = {"severity": "blah", "resource": _GLOBAL_RESOURCE._to_dict()} - - batch.commit() - - self.assertEqual(list(batch.entries), []) - self.assertEqual( - api._write_entries_called_with, ([ENTRY], logger.full_name, None, None) - ) - - def test_commit_w_resource_specified(self): - from google.cloud.logging.entries import _GLOBAL_RESOURCE - from google.cloud.logging.resource import Resource - - logger = _Logger() - client = _Client(project=self.PROJECT, connection=_make_credentials()) - api = client.logging_api = _DummyLoggingAPI() - RESOURCE = Resource( - type="gae_app", labels={"module_id": "default", "version_id": "test"} - ) - - batch = self._make_one(logger, client, resource=RESOURCE) - MESSAGE = "This is the entry text" - ENTRIES = [ - {"textPayload": MESSAGE}, - {"textPayload": MESSAGE, "resource": _GLOBAL_RESOURCE._to_dict()}, - ] - batch.log_text(MESSAGE, resource=None) - batch.log_text(MESSAGE) - batch.commit() - self.assertEqual( - api._write_entries_called_with, - (ENTRIES, logger.full_name, RESOURCE._to_dict(), None), - ) - - def test_commit_w_bound_client(self): - import json - import datetime - from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - from google.cloud._helpers import _datetime_to_rfc3339 - from google.cloud.logging.entries import _GLOBAL_RESOURCE - - TEXT = "This is the entry text" - STRUCT = {"message": TEXT, "weather": "partly cloudy"} - message = Struct(fields={"foo": Value(bool_value=True)}) - IID1 = "IID1" - IID2 = "IID2" - IID3 = "IID3" - TIMESTAMP1 = datetime.datetime(2016, 12, 31, 0, 0, 1, 999999) - TIMESTAMP2 = datetime.datetime(2016, 12, 31, 0, 0, 2, 999999) - TIMESTAMP3 = datetime.datetime(2016, 12, 31, 0, 0, 3, 999999) - TRACE1 = "12345678-1234-5678-1234-567812345678" - TRACE2 = "12345678-1234-5678-1234-567812345679" - TRACE3 = "12345678-1234-5678-1234-567812345670" - SPANID1 = "000000000000004a" - SPANID2 = "000000000000004b" - SPANID3 = "000000000000004c" - ENTRIES = [ - { - "textPayload": TEXT, - "insertId": IID1, - "timestamp": _datetime_to_rfc3339(TIMESTAMP1), - "resource": _GLOBAL_RESOURCE._to_dict(), - "trace": TRACE1, - "spanId": SPANID1, - "traceSampled": True, - }, - { - "jsonPayload": STRUCT, - "insertId": IID2, - "timestamp": _datetime_to_rfc3339(TIMESTAMP2), - "resource": _GLOBAL_RESOURCE._to_dict(), - "trace": TRACE2, - "spanId": SPANID2, - "traceSampled": False, - }, - { - "protoPayload": json.loads(MessageToJson(message)), - "insertId": IID3, - "timestamp": _datetime_to_rfc3339(TIMESTAMP3), - "resource": _GLOBAL_RESOURCE._to_dict(), - "trace": TRACE3, - "spanId": SPANID3, - "traceSampled": True, - }, - ] - client = _Client(project=self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = _Logger() - batch = self._make_one(logger, client=client) - - batch.log_text( - TEXT, - insert_id=IID1, - timestamp=TIMESTAMP1, - trace=TRACE1, - span_id=SPANID1, - trace_sampled=True, - ) - batch.log_struct( - STRUCT, - insert_id=IID2, - timestamp=TIMESTAMP2, - trace=TRACE2, - span_id=SPANID2, - trace_sampled=False, - ) - batch.log_proto( - message, - insert_id=IID3, - timestamp=TIMESTAMP3, - trace=TRACE3, - span_id=SPANID3, - trace_sampled=True, - ) - batch.commit() - - self.assertEqual(list(batch.entries), []) - self.assertEqual( - api._write_entries_called_with, (ENTRIES, logger.full_name, None, 
None) - ) - - def test_commit_w_alternate_client(self): - import json - from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - from google.cloud.logging.logger import Logger - from google.cloud.logging.entries import _GLOBAL_RESOURCE - - TEXT = "This is the entry text" - STRUCT = {"message": TEXT, "weather": "partly cloudy"} - message = Struct(fields={"foo": Value(bool_value=True)}) - DEFAULT_LABELS = {"foo": "spam"} - LABELS = {"foo": "bar", "baz": "qux"} - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.logging_api = _DummyLoggingAPI() - logger = Logger("logger_name", client1, labels=DEFAULT_LABELS) - ENTRIES = [ - { - "textPayload": TEXT, - "labels": LABELS, - "resource": _GLOBAL_RESOURCE._to_dict(), - }, - { - "jsonPayload": STRUCT, - "severity": SEVERITY, - "resource": _GLOBAL_RESOURCE._to_dict(), - }, - { - "protoPayload": json.loads(MessageToJson(message)), - "httpRequest": REQUEST, - "resource": _GLOBAL_RESOURCE._to_dict(), - }, - ] - batch = self._make_one(logger, client=client1) - - batch.log_text(TEXT, labels=LABELS) - batch.log_struct(STRUCT, severity=SEVERITY) - batch.log_proto(message, http_request=REQUEST) - batch.commit(client=client2) - - self.assertEqual(list(batch.entries), []) - self.assertEqual( - api._write_entries_called_with, - (ENTRIES, logger.full_name, None, DEFAULT_LABELS), - ) - - def test_context_mgr_success(self): - import json - from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - from google.cloud.logging.logger import Logger - from google.cloud.logging.entries import _GLOBAL_RESOURCE - - TEXT = "This is the entry text" - STRUCT = {"message": TEXT, "weather": "partly cloudy"} - message = Struct(fields={"foo": Value(bool_value=True)}) - DEFAULT_LABELS = {"foo": "spam"} - LABELS = {"foo": "bar", "baz": "qux"} - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - client = _Client(project=self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = Logger("logger_name", client, labels=DEFAULT_LABELS) - ENTRIES = [ - { - "textPayload": TEXT, - "httpRequest": REQUEST, - "resource": _GLOBAL_RESOURCE._to_dict(), - }, - { - "jsonPayload": STRUCT, - "labels": LABELS, - "resource": _GLOBAL_RESOURCE._to_dict(), - }, - { - "protoPayload": json.loads(MessageToJson(message)), - "resource": _GLOBAL_RESOURCE._to_dict(), - "severity": SEVERITY, - }, - ] - batch = self._make_one(logger, client=client) - - with batch as other: - other.log_text(TEXT, http_request=REQUEST) - other.log_struct(STRUCT, labels=LABELS) - other.log_proto(message, severity=SEVERITY) - - self.assertEqual(list(batch.entries), []) - self.assertEqual( - api._write_entries_called_with, - (ENTRIES, logger.full_name, None, DEFAULT_LABELS), - ) - - def test_context_mgr_failure(self): - import datetime - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - from google.cloud.logging.entries import TextEntry - from google.cloud.logging.entries import StructEntry - from google.cloud.logging.entries import ProtobufEntry - - 
TEXT = "This is the entry text" - STRUCT = {"message": TEXT, "weather": "partly cloudy"} - LABELS = {"foo": "bar", "baz": "qux"} - IID = "IID" - SEVERITY = "CRITICAL" - METHOD = "POST" - URI = "https://api.example.com/endpoint" - STATUS = "500" - REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - message = Struct(fields={"foo": Value(bool_value=True)}) - client = _Client(project=self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = _Logger() - UNSENT = [ - TextEntry(payload=TEXT, insert_id=IID, timestamp=TIMESTAMP), - StructEntry(payload=STRUCT, severity=SEVERITY), - ProtobufEntry(payload=message, labels=LABELS, http_request=REQUEST), - ] - batch = self._make_one(logger, client=client) - - try: - with batch as other: - other.log_text(TEXT, insert_id=IID, timestamp=TIMESTAMP) - other.log_struct(STRUCT, severity=SEVERITY) - other.log_proto(message, labels=LABELS, http_request=REQUEST) - raise _Bugout() - except _Bugout: - pass - - self.assertEqual(list(batch.entries), UNSENT) - self.assertIsNone(api._write_entries_called_with) - - -class _Logger(object): - - labels = None - - def __init__(self, name="NAME", project="PROJECT"): - self.full_name = "projects/%s/logs/%s" % (project, name) - - -class _DummyLoggingAPI(object): - - _write_entries_called_with = None - - def write_entries(self, entries, logger_name=None, resource=None, labels=None): - self._write_entries_called_with = (entries, logger_name, resource, labels) - - def logger_delete(self, project, logger_name): - self._logger_delete_called_with = (project, logger_name) - - -class _Client(object): - def __init__(self, project, connection=None): - self.project = project - self._connection = connection - - -class _Bugout(Exception): - pass - - -class _Connection(object): - - _called_with = None - - def __init__(self, *responses): - self._responses = responses - - def api_request(self, **kw): - self._called_with = kw - response, self._responses = self._responses[0], self._responses[1:] - return response diff --git a/logging/tests/unit/test_metric.py b/logging/tests/unit/test_metric.py deleted file mode 100644 index 93ee90b87470..000000000000 --- a/logging/tests/unit/test_metric.py +++ /dev/null @@ -1,247 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - - -class TestMetric(unittest.TestCase): - - PROJECT = "test-project" - METRIC_NAME = "metric-name" - FILTER = "logName:syslog AND severity>=ERROR" - DESCRIPTION = "DESCRIPTION" - - @staticmethod - def _get_target_class(): - from google.cloud.logging.metric import Metric - - return Metric - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - FULL = "projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) - client = _Client(self.PROJECT) - metric = self._make_one(self.METRIC_NAME, client=client) - self.assertEqual(metric.name, self.METRIC_NAME) - self.assertIsNone(metric.filter_) - self.assertEqual(metric.description, "") - self.assertIs(metric.client, client) - self.assertEqual(metric.project, self.PROJECT) - self.assertEqual(metric.full_name, FULL) - self.assertEqual(metric.path, "/%s" % (FULL,)) - - def test_ctor_explicit(self): - FULL = "projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) - client = _Client(self.PROJECT) - metric = self._make_one( - self.METRIC_NAME, self.FILTER, client=client, description=self.DESCRIPTION - ) - self.assertEqual(metric.name, self.METRIC_NAME) - self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, self.DESCRIPTION) - self.assertIs(metric.client, client) - self.assertEqual(metric.project, self.PROJECT) - self.assertEqual(metric.full_name, FULL) - self.assertEqual(metric.path, "/%s" % (FULL,)) - - def test_from_api_repr_minimal(self): - client = _Client(project=self.PROJECT) - FULL = "projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) - RESOURCE = {"name": self.METRIC_NAME, "filter": self.FILTER} - klass = self._get_target_class() - metric = klass.from_api_repr(RESOURCE, client=client) - self.assertEqual(metric.name, self.METRIC_NAME) - self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, "") - self.assertIs(metric._client, client) - self.assertEqual(metric.project, self.PROJECT) - self.assertEqual(metric.full_name, FULL) - - def test_from_api_repr_w_description(self): - client = _Client(project=self.PROJECT) - FULL = "projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) - DESCRIPTION = "DESCRIPTION" - RESOURCE = { - "name": self.METRIC_NAME, - "filter": self.FILTER, - "description": DESCRIPTION, - } - klass = self._get_target_class() - metric = klass.from_api_repr(RESOURCE, client=client) - self.assertEqual(metric.name, self.METRIC_NAME) - self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, DESCRIPTION) - self.assertIs(metric._client, client) - self.assertEqual(metric.project, self.PROJECT) - self.assertEqual(metric.full_name, FULL) - - def test_create_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.metrics_api = _DummyMetricsAPI() - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client) - - metric.create() - - self.assertEqual( - api._metric_create_called_with, - (self.PROJECT, self.METRIC_NAME, self.FILTER, ""), - ) - - def test_create_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.metrics_api = _DummyMetricsAPI() - metric = self._make_one( - self.METRIC_NAME, self.FILTER, client=client1, description=self.DESCRIPTION - ) - - metric.create(client=client2) - - self.assertEqual( - api._metric_create_called_with, - (self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION), - ) - - def test_exists_miss_w_bound_client(self): - 
client = _Client(project=self.PROJECT) - api = client.metrics_api = _DummyMetricsAPI() - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client) - - self.assertFalse(metric.exists()) - - self.assertEqual(api._metric_get_called_with, (self.PROJECT, self.METRIC_NAME)) - - def test_exists_hit_w_alternate_client(self): - RESOURCE = {"name": self.METRIC_NAME, "filter": self.FILTER} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.metrics_api = _DummyMetricsAPI() - api._metric_get_response = RESOURCE - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1) - - self.assertTrue(metric.exists(client=client2)) - - self.assertEqual(api._metric_get_called_with, (self.PROJECT, self.METRIC_NAME)) - - def test_reload_w_bound_client(self): - NEW_FILTER = "logName:syslog AND severity>=INFO" - RESOURCE = {"name": self.METRIC_NAME, "filter": NEW_FILTER} - client = _Client(project=self.PROJECT) - api = client.metrics_api = _DummyMetricsAPI() - api._metric_get_response = RESOURCE - metric = self._make_one( - self.METRIC_NAME, self.FILTER, client=client, description=self.DESCRIPTION - ) - - metric.reload() - - self.assertEqual(metric.filter_, NEW_FILTER) - self.assertEqual(metric.description, "") - self.assertEqual(api._metric_get_called_with, (self.PROJECT, self.METRIC_NAME)) - - def test_reload_w_alternate_client(self): - NEW_FILTER = "logName:syslog AND severity>=INFO" - RESOURCE = { - "name": self.METRIC_NAME, - "description": self.DESCRIPTION, - "filter": NEW_FILTER, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.metrics_api = _DummyMetricsAPI() - api._metric_get_response = RESOURCE - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1) - - metric.reload(client=client2) - - self.assertEqual(metric.filter_, NEW_FILTER) - self.assertEqual(metric.description, self.DESCRIPTION) - self.assertEqual(api._metric_get_called_with, (self.PROJECT, self.METRIC_NAME)) - - def test_update_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.metrics_api = _DummyMetricsAPI() - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client) - - metric.update() - - self.assertEqual( - api._metric_update_called_with, - (self.PROJECT, self.METRIC_NAME, self.FILTER, ""), - ) - - def test_update_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.metrics_api = _DummyMetricsAPI() - metric = self._make_one( - self.METRIC_NAME, self.FILTER, client=client1, description=self.DESCRIPTION - ) - - metric.update(client=client2) - - self.assertEqual( - api._metric_update_called_with, - (self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION), - ) - - def test_delete_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.metrics_api = _DummyMetricsAPI() - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client) - - metric.delete() - - self.assertEqual( - api._metric_delete_called_with, (self.PROJECT, self.METRIC_NAME) - ) - - def test_delete_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.metrics_api = _DummyMetricsAPI() - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1) - - metric.delete(client=client2) - - self.assertEqual( - api._metric_delete_called_with, (self.PROJECT, self.METRIC_NAME) - ) - - -class _Client(object): - def __init__(self, 
project): - self.project = project - - -class _DummyMetricsAPI(object): - def metric_create(self, project, metric_name, filter_, description): - self._metric_create_called_with = (project, metric_name, filter_, description) - - def metric_get(self, project, metric_name): - from google.cloud.exceptions import NotFound - - self._metric_get_called_with = (project, metric_name) - try: - return self._metric_get_response - except AttributeError: - raise NotFound("miss") - - def metric_update(self, project, metric_name, filter_, description): - self._metric_update_called_with = (project, metric_name, filter_, description) - - def metric_delete(self, project, metric_name): - self._metric_delete_called_with = (project, metric_name) diff --git a/logging/tests/unit/test_sink.py b/logging/tests/unit/test_sink.py deleted file mode 100644 index dc1ff9563f9c..000000000000 --- a/logging/tests/unit/test_sink.py +++ /dev/null @@ -1,322 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class TestSink(unittest.TestCase): - - PROJECT = "test-project" - SINK_NAME = "sink-name" - FILTER = "logName:syslog AND severity>=INFO" - DESTINATION_URI = "faux.googleapis.com/destination" - WRITER_IDENTITY = "serviceAccount:project-123@example.com" - - @staticmethod - def _get_target_class(): - from google.cloud.logging.sink import Sink - - return Sink - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - FULL = "projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) - client = _Client(self.PROJECT) - sink = self._make_one(self.SINK_NAME, client=client) - self.assertEqual(sink.name, self.SINK_NAME) - self.assertIsNone(sink.filter_) - self.assertIsNone(sink.destination) - self.assertIs(sink.client, client) - self.assertEqual(sink.project, self.PROJECT) - self.assertEqual(sink.full_name, FULL) - self.assertEqual(sink.path, "/%s" % (FULL,)) - - def test_ctor_explicit(self): - FULL = "projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) - client = _Client(self.PROJECT) - sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client - ) - self.assertEqual(sink.name, self.SINK_NAME) - self.assertEqual(sink.filter_, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIs(sink.client, client) - self.assertEqual(sink.project, self.PROJECT) - self.assertEqual(sink.full_name, FULL) - self.assertEqual(sink.path, "/%s" % (FULL,)) - - def test_from_api_repr_minimal(self): - client = _Client(project=self.PROJECT) - FULL = "projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) - RESOURCE = {"name": self.SINK_NAME, "destination": self.DESTINATION_URI} - klass = self._get_target_class() - sink = klass.from_api_repr(RESOURCE, client=client) - self.assertEqual(sink.name, self.SINK_NAME) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIsNone(sink.filter_) - self.assertIsNone(sink.writer_identity) - self.assertIs(sink._client, client) - 
self.assertEqual(sink.project, self.PROJECT) - self.assertEqual(sink.full_name, FULL) - - def test_from_api_repr_full(self): - client = _Client(project=self.PROJECT) - FULL = "projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) - RESOURCE = { - "name": self.SINK_NAME, - "destination": self.DESTINATION_URI, - "filter": self.FILTER, - "writerIdentity": self.WRITER_IDENTITY, - } - klass = self._get_target_class() - sink = klass.from_api_repr(RESOURCE, client=client) - self.assertEqual(sink.name, self.SINK_NAME) - self.assertEqual(sink.filter_, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) - self.assertIs(sink._client, client) - self.assertEqual(sink.project, self.PROJECT) - self.assertEqual(sink.full_name, FULL) - - def test_create_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.sinks_api = _DummySinksAPI() - api._sink_create_response = { - "name": self.SINK_NAME, - "filter": self.FILTER, - "destination": self.DESTINATION_URI, - "writerIdentity": self.WRITER_IDENTITY, - } - sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client - ) - - sink.create() - - self.assertEqual(sink.name, self.SINK_NAME) - self.assertEqual(sink.filter_, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) - self.assertEqual( - api._sink_create_called_with, - (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, False), - ) - - def test_create_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1 - ) - api = client2.sinks_api = _DummySinksAPI() - api._sink_create_response = { - "name": self.SINK_NAME, - "filter": self.FILTER, - "destination": self.DESTINATION_URI, - "writerIdentity": self.WRITER_IDENTITY, - } - - sink.create(client=client2, unique_writer_identity=True) - - self.assertEqual(sink.name, self.SINK_NAME) - self.assertEqual(sink.filter_, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) - self.assertEqual( - api._sink_create_called_with, - (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, True), - ) - - def test_exists_miss_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.sinks_api = _DummySinksAPI() - sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client - ) - - self.assertFalse(sink.exists()) - - self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) - - def test_exists_hit_w_alternate_client(self): - RESOURCE = { - "name": self.SINK_NAME, - "filter": self.FILTER, - "destination": self.DESTINATION_URI, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.sinks_api = _DummySinksAPI() - api._sink_get_response = RESOURCE - sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1 - ) - - self.assertTrue(sink.exists(client=client2)) - - self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) - - def test_reload_w_bound_client(self): - NEW_DESTINATION_URI = "faux.googleapis.com/other" - RESOURCE = {"name": self.SINK_NAME, "destination": NEW_DESTINATION_URI} - client = _Client(project=self.PROJECT) - api = client.sinks_api = 
_DummySinksAPI() - api._sink_get_response = RESOURCE - sink = self._make_one(self.SINK_NAME, client=client) - - sink.reload() - - self.assertEqual(sink.destination, NEW_DESTINATION_URI) - self.assertIsNone(sink.filter_) - self.assertIsNone(sink.writer_identity) - self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) - - def test_reload_w_alternate_client(self): - NEW_FILTER = "logName:syslog AND severity>=INFO" - NEW_DESTINATION_URI = "faux.googleapis.com/other" - RESOURCE = { - "name": self.SINK_NAME, - "filter": NEW_FILTER, - "destination": NEW_DESTINATION_URI, - "writerIdentity": self.WRITER_IDENTITY, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.sinks_api = _DummySinksAPI() - api._sink_get_response = RESOURCE - sink = self._make_one(self.SINK_NAME, client=client1) - - sink.reload(client=client2) - - self.assertEqual(sink.destination, NEW_DESTINATION_URI) - self.assertEqual(sink.filter_, NEW_FILTER) - self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) - self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) - - def test_update_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.sinks_api = _DummySinksAPI() - api._sink_update_response = { - "name": self.SINK_NAME, - "filter": self.FILTER, - "destination": self.DESTINATION_URI, - "writerIdentity": self.WRITER_IDENTITY, - } - sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client - ) - - sink.update() - - self.assertEqual(sink.name, self.SINK_NAME) - self.assertEqual(sink.filter_, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) - self.assertEqual( - api._sink_update_called_with, - (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, False), - ) - - def test_update_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.sinks_api = _DummySinksAPI() - api._sink_update_response = { - "name": self.SINK_NAME, - "filter": self.FILTER, - "destination": self.DESTINATION_URI, - "writerIdentity": self.WRITER_IDENTITY, - } - sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1 - ) - - sink.update(client=client2, unique_writer_identity=True) - - self.assertEqual(sink.name, self.SINK_NAME) - self.assertEqual(sink.filter_, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) - self.assertEqual( - api._sink_update_called_with, - (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, True), - ) - - def test_delete_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.sinks_api = _DummySinksAPI() - sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client - ) - - sink.delete() - - self.assertEqual(api._sink_delete_called_with, (self.PROJECT, self.SINK_NAME)) - - def test_delete_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.sinks_api = _DummySinksAPI() - sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1 - ) - - sink.delete(client=client2) - - self.assertEqual(api._sink_delete_called_with, (self.PROJECT, self.SINK_NAME)) - - -class _Client(object): - def __init__(self, project): - self.project = project - - -class 
_DummySinksAPI(object): - def sink_create( - self, project, sink_name, filter_, destination, unique_writer_identity=False - ): - self._sink_create_called_with = ( - project, - sink_name, - filter_, - destination, - unique_writer_identity, - ) - return self._sink_create_response - - def sink_get(self, project, sink_name): - from google.cloud.exceptions import NotFound - - self._sink_get_called_with = (project, sink_name) - try: - return self._sink_get_response - except AttributeError: - raise NotFound("miss") - - def sink_update( - self, project, sink_name, filter_, destination, unique_writer_identity=False - ): - self._sink_update_called_with = ( - project, - sink_name, - filter_, - destination, - unique_writer_identity, - ) - return self._sink_update_response - - def sink_delete(self, project, sink_name): - self._sink_delete_called_with = (project, sink_name) diff --git a/monitoring/.coveragerc b/monitoring/.coveragerc deleted file mode 100644 index b178b094aa1d..000000000000 --- a/monitoring/.coveragerc +++ /dev/null @@ -1,19 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[run] -branch = True - -[report] -fail_under = 100 -show_missing = True -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file diff --git a/monitoring/.flake8 b/monitoring/.flake8 deleted file mode 100644 index 0268ecc9c55c..000000000000 --- a/monitoring/.flake8 +++ /dev/null @@ -1,14 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - *_pb2.py - - # Standard linting exemptions. - __pycache__, - .git, - *.pyc, - conf.py diff --git a/monitoring/.repo-metadata.json b/monitoring/.repo-metadata.json deleted file mode 100644 index 48920988588f..000000000000 --- a/monitoring/.repo-metadata.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "name": "monitoring", - "name_pretty": "Stackdriver Monitoring", - "product_documentation": "https://cloud.google.com/monitoring/docs", - "client_documentation": "https://googleapis.dev/python/monitoring/latest", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559785", - "release_level": "alpha", - "language": "python", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-monitoring", - "api_id": "monitoring.googleapis.com" -} \ No newline at end of file diff --git a/monitoring/CHANGELOG.md b/monitoring/CHANGELOG.md deleted file mode 100644 index a427e8ab1415..000000000000 --- a/monitoring/CHANGELOG.md +++ /dev/null @@ -1,200 +0,0 @@ -# Changelog - -[PyPI History][1] - -[1]: https://pypi.org/project/google-cloud-monitoring/#history - -## 0.34.0 - -11-19-2019 14:27 PST - -### Implementation Changes -- Deprecate resource name helper methods; update docs configuration (via synth). ([#9838](https://github.com/googleapis/google-cloud-python/pull/9838)) - -### New Features -- Add service monitoring (via synth). ([#9799](https://github.com/googleapis/google-cloud-python/pull/9799)) -- Add `monitoring.v3.InternalChecker.state` (via synth). ([#9546](https://github.com/googleapis/google-cloud-python/pull/9546)) -- Add `monitoring.v3.UptimeCheckConfig.ContentMatcher.ContentMatcherOption` (via synth). ([#9546](https://github.com/googleapis/google-cloud-python/pull/9546)) -- Add `recursive` parameter to `delete_group` (via synth). 
([#9546](https://github.com/googleapis/google-cloud-python/pull/9546)) -- Add read-only `validity` field to `monitoring.v3.AlertPolicy` (via synth). ([#9546](https://github.com/googleapis/google-cloud-python/pull/9546)) -- Add `validate_ssl` parameter to `monitoring.v3.UptimeCheckConfig.HttpCheck` (via synth). ([#9546](https://github.com/googleapis/google-cloud-python/pull/9546)) - -### Documentation -- Add python 2 sunset banner to documentation. ([#9036](https://github.com/googleapis/google-cloud-python/pull/9036)) -- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) -- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) -- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) - -### Internal / Testing Changes -- Revert change to docs/conf.py. ([#9803](https://github.com/googleapis/google-cloud-python/pull/9803)) -- Normalize VPCSC configuration in systests. ([#9615](https://github.com/googleapis/google-cloud-python/pull/9615)) -- Make VPCSC env comparison case-insensitive. ([#9564](https://github.com/googleapis/google-cloud-python/pull/9564)) -- Refresh VPCSC tests. ([#9437](https://github.com/googleapis/google-cloud-python/pull/9437)) -- Fix environment variables for VPC tests. ([#8302](https://github.com/googleapis/google-cloud-python/pull/8302)) - -## 0.33.0 - -08-12-2019 13:54 PDT - -### New Features -- Add notification channel verification; remove send/recv msg size limit (via synth). ([#8980](https://github.com/googleapis/google-cloud-python/pull/8980)) - -### Documentation -- Normalize docs. ([#8994](https://github.com/googleapis/google-cloud-python/pull/8994)) -- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) - -## 0.32.0 - -07-24-2019 16:52 PDT - - -### Implementation Changes -- Allow kwargs to be passed to create_channel (via synth). ([#8397](https://github.com/googleapis/google-cloud-python/pull/8397)) -- Add routing header to method metadata, update docs config (via synth). ([#7642](https://github.com/googleapis/google-cloud-python/pull/7642)) -- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) -- Remove unused message exports. ([#7271](https://github.com/googleapis/google-cloud-python/pull/7271)) -- Protoc-generated serialization update. ([#7089](https://github.com/googleapis/google-cloud-python/pull/7089)) -- Pick up stub docstring fix in GAPIC generator. ([#6976](https://github.com/googleapis/google-cloud-python/pull/6976)) - -### New Features -- Add 'client_options' support, update list method docstrings (via synth). ([#8516](https://github.com/googleapis/google-cloud-python/pull/8516)) - -### Dependencies -- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) - -### Documentation -- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) -- Add compatibility check badges to READMEs. 
([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) -- Fixes [#8545](https://github.com/googleapis/google-cloud-python/pull/8545) by removing typing information for kwargs to not conflict with type checkers ([#8546](https://github.com/googleapis/google-cloud-python/pull/8546)) -- Update docstrings, copy lintified proto files (via synth). ([#7451](https://github.com/googleapis/google-cloud-python/pull/7451)) -- googlecloudplatform --> googleapis in READMEs ([#7411](https://github.com/googleapis/google-cloud-python/pull/7411)) -- Updated client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) -- Update copyright: 2018 -> 2019. ([#7151](https://github.com/googleapis/google-cloud-python/pull/7151)) - -### Internal / Testing Changes -- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) -- Declare encoding as utf-8 in pb2 files (via synth). ([#8358](https://github.com/googleapis/google-cloud-python/pull/8358)) -- Add disclaimer to auto-generated template files (via synth). ([#8321](https://github.com/googleapis/google-cloud-python/pull/8321)) -- Fix coverage in 'types.py' (via synth). ([#8159](https://github.com/googleapis/google-cloud-python/pull/8159)) -- Add empty lines (via synth). ([#8065](https://github.com/googleapis/google-cloud-python/pull/8065)) -- Add nox session `docs` (via synth). ([#7777](https://github.com/googleapis/google-cloud-python/pull/7777)) -- Regenerate VPCSC tests to include NotificationChannelService and UptimeCheckService. ([#7853](https://github.com/googleapis/google-cloud-python/pull/7853)) -- Set environment variables for VPCSC system tests. ([#7847](https://github.com/googleapis/google-cloud-python/pull/7847)) -- Add VPCSC system test. ([#7791](https://github.com/googleapis/google-cloud-python/pull/7791)) -- protobuf file housekeeping (no user-visible changes) (via synth). ([#7588](https://github.com/googleapis/google-cloud-python/pull/7588)) -- Add clarifying comment to blacken nox target. ([#7398](https://github.com/googleapis/google-cloud-python/pull/7398)) -- Trivial gapic-generator change. ([#7231](https://github.com/googleapis/google-cloud-python/pull/7231)) -- Add protos as an artifact to library ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) - -## 0.31.1 - -12-17-2018 16:51 PST - - -### Implementation Changes -- Import `iam.policy` from `google.api_core.iam.policy`. ([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) - -### Documentation -- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) -- Normalize docs for `page_size` / `max_results` / `page_token`. ([#6842](https://github.com/googleapis/google-cloud-python/pull/6842)) - -### Internal / Testing Changes -- Add baseline for synth.metadata -- Update noxfile. -- Blacken all gen'd libs ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) - -## 0.31.0 - -11-29-2018 13:03 PST - - -### Implementation Changes -- Pick up enum fixes in the GAPIC generator. ([#6614](https://github.com/googleapis/google-cloud-python/pull/6614)) -- Pick up fixes to the GAPIC generator. ([#6501](https://github.com/googleapis/google-cloud-python/pull/6501)) -- Fix client_info bug, update docstrings and timeouts. ([#6416](https://github.com/googleapis/google-cloud-python/pull/6416)) - -### Dependencies -- Bump minimum 'api_core' version for all GAPIC libs to 1.4.1. 
([#6391](https://github.com/googleapis/google-cloud-python/pull/6391)) - -### Documentation -- Docstring changes, 'account' -> 'workspace', via synth. ([#6461](https://github.com/googleapis/google-cloud-python/pull/6461)) -- Add 'dropped_labels', 'span_context', plus docstring changes. ([#6358](https://github.com/googleapis/google-cloud-python/pull/6358)) -- Fix GAX fossils ([#6264](https://github.com/googleapis/google-cloud-python/pull/6264)) -- Harmonize / DRY 'monitoring/README.rst' / 'monitoring/docs/index.rst'. ([#6156](https://github.com/googleapis/google-cloud-python/pull/6156)) - -### Internal / Testing Changes -- Run Black on Generated libraries ([#6666](https://github.com/googleapis/google-cloud-python/pull/6666)) -- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) -- Assorted synth fixups / cleanups ([#6400](https://github.com/googleapis/google-cloud-python/pull/6400)) -- Use new Nox ([#6175](https://github.com/googleapis/google-cloud-python/pull/6175)) -- Fix long lines from autosynth ([#5961](https://github.com/googleapis/google-cloud-python/pull/5961) -- Test pandas under all supported Python versions ([#5858](https://github.com/googleapis/google-cloud-python/pull/5858)) - -## 0.30.1 - -### Implementation Changes -- Monitoring: Add Transports Layer to clients (#5594) -- Remove gRPC size restrictions (4MB default) (#5594) - -### Documentation -- Monitoring. Update documentation links. (#5557) - -## 0.30.0 - -### Implementation Changes -- Avoid overwriting '__module__' of messages from shared modules. (#5364) - -### New Features -- Add aliases for new V3 service clients. (#5424) - -### Documentation -- Remove link to `usage` on index of monitoring (#5272) - -### Internal / Testing Changes -- Modify system tests to use prerelease versions of grpcio (#5304) -- Add Test runs for Python 3.7 and remove 3.4 (#5295) - -## 0.29.0 - -### Implementation Changes -- Update monitoring library to use new generated client (#5212) -- Move aligner and reducer links from timeSeries.list to alertPolicies (#5011) - -### Internal / Testing Changes -- Fix bad trove classifier - -## 0.28.1 - -### Implementation changes - -- Convert label values to str in client.metric() (#4910) - -### Dependencies - -- Update dependency range for api-core to include v1.0.0 releases (#4944) - -### Documentation - -- Fixing "Fore" -> "For" typo in README docs. (#4317) - -### Testing and internal changes - -- Install local dependencies when running lint (#4936) -- Re-enable lint for tests, remove usage of pylint (#4921) -- Normalize all setup.py files (#4909) -- Making a `nox -s default` session for all packages. (#4324) -- Shorten test names (#4321) - -## 0.28.0 - -### Documentation - -- Added link to "Python Development Environment Setup Guide" in - project README (#4187, h/t to @michaelawyu) - -### Dependencies - -- Upgrading to `google-cloud-core >= 0.28.0` and adding dependency - on `google-api-core` (#4221, #4280) - -PyPI: https://pypi.org/project/google-cloud-monitoring/0.28.0/ diff --git a/monitoring/LICENSE b/monitoring/LICENSE deleted file mode 100644 index a8ee855de2aa..000000000000 --- a/monitoring/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - https://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/monitoring/MANIFEST.in b/monitoring/MANIFEST.in deleted file mode 100644 index 9cbf175afe6b..000000000000 --- a/monitoring/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include README.rst LICENSE -recursive-include google *.json *.proto -recursive-include tests * -global-exclude *.py[co] -global-exclude __pycache__ diff --git a/monitoring/README.rst b/monitoring/README.rst deleted file mode 100644 index 1f12505797eb..000000000000 --- a/monitoring/README.rst +++ /dev/null @@ -1,114 +0,0 @@ -Python Client for Stackdriver Monitoring API (`Alpha`_) -======================================================= - -|pypi| |versions| - -`Stackdriver Monitoring API`_: Manages your Stackdriver Monitoring data and -configurations. Most projects must be associated with a Stackdriver account, -with a few exceptions as noted on the individual method pages. - -- `Client Library Documentation`_ -- `Product Documentation`_ - -.. _Alpha: https://github.com/googleapis/google-cloud-python/blob/master/README.rst -.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-monitoring.svg - :target: https://pypi.org/project/google-cloud-monitoring/ -.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-monitoring.svg - :target: https://pypi.org/project/google-cloud-monitoring/ -.. _Stackdriver Monitoring API: https://cloud.google.com/monitoring/api/ref_v3/rest/ -.. _Client Library Documentation: https://googleapis.dev/python/monitoring/latest -.. _Product Documentation: https://cloud.google.com/monitoring/docs - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. `Enable the Stackdriver Monitoring API.`_ -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the Stackdriver Monitoring API.: https://cloud.google.com/monitoring/api/enable-api -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Supported Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.5 - -Deprecated Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv - source /bin/activate - /bin/pip install google-cloud-monitoring - - -Windows -^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-monitoring - -Preview -~~~~~~~ - -MetricServiceClient -^^^^^^^^^^^^^^^^^^^ - -.. 
code:: py - - from google.cloud import monitoring_v3 - - client = monitoring_v3.MetricServiceClient() - - name = client.project_path('[PROJECT]') - - - # Iterate over all results - for element in client.list_monitored_resource_descriptors(name): - # process element - pass - - # Or iterate over results one page at a time - for page in client.list_monitored_resource_descriptors(name).pages: - for element in page: - # process element - pass - -Next Steps -~~~~~~~~~~ - - -- Read the `Client Library Documentation`_ for Stackdriver Monitoring API - to see other available methods on the client. -- Read the `Product documentation`_ to learn more about the product and see - How-to Guides. diff --git a/monitoring/docs/README.rst b/monitoring/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/monitoring/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/monitoring/docs/_static/custom.css b/monitoring/docs/_static/custom.css deleted file mode 100644 index 0abaf229fce3..000000000000 --- a/monitoring/docs/_static/custom.css +++ /dev/null @@ -1,4 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} \ No newline at end of file diff --git a/monitoring/docs/_templates/layout.html b/monitoring/docs/_templates/layout.html deleted file mode 100644 index 228529efe2d2..000000000000 --- a/monitoring/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@ - -{% extends "!layout.html" %} -{%- block content %} -{%- if theme_fixed_sidebar|lower == 'true' %} -
- {{ sidebar() }} - {%- block document %} -
- {%- if render_sidebar %} -
- {%- endif %} - - {%- block relbar_top %} - {%- if theme_show_relbar_top|tobool %} - - {%- endif %} - {% endblock %} - -
-
- On January 1, 2020 this library will no longer support Python 2 on the latest released version. - Previously released library versions will continue to be available. For more information please - visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
- {% block body %} {% endblock %} -
- - {%- block relbar_bottom %} - {%- if theme_show_relbar_bottom|tobool %} - - {%- endif %} - {% endblock %} - - {%- if render_sidebar %} -
- {%- endif %} -
- {%- endblock %} -
-
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/monitoring/docs/changelog.md b/monitoring/docs/changelog.md deleted file mode 120000 index 04c99a55caae..000000000000 --- a/monitoring/docs/changelog.md +++ /dev/null @@ -1 +0,0 @@ -../CHANGELOG.md \ No newline at end of file diff --git a/monitoring/docs/conf.py b/monitoring/docs/conf.py deleted file mode 100644 index 0eebd9f5abb0..000000000000 --- a/monitoring/docs/conf.py +++ /dev/null @@ -1,363 +0,0 @@ -# -*- coding: utf-8 -*- -# -# google-cloud-monitoring documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-monitoring" -copyright = u"2017, Google" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. 
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. 
-# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-monitoring-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - #'preamble': '', - # Latex figure (float) alignment - #'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-monitoring.tex", - u"google-cloud-monitoring Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-monitoring", - u"google-cloud-monitoring Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. 
-# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-monitoring", - u"google-cloud-monitoring Documentation", - author, - "google-cloud-monitoring", - "GAPIC library for the {metadata.shortName} v3 service", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/master/", None), - "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), - "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/monitoring/docs/gapic/v3/api.rst b/monitoring/docs/gapic/v3/api.rst deleted file mode 100644 index 615e004633e5..000000000000 --- a/monitoring/docs/gapic/v3/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Stackdriver Monitoring API -===================================== - -.. automodule:: google.cloud.monitoring_v3 - :members: - :inherited-members: \ No newline at end of file diff --git a/monitoring/docs/gapic/v3/types.rst b/monitoring/docs/gapic/v3/types.rst deleted file mode 100644 index 9790a948460e..000000000000 --- a/monitoring/docs/gapic/v3/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Stackdriver Monitoring API Client -=========================================== - -.. automodule:: google.cloud.monitoring_v3.types - :members: \ No newline at end of file diff --git a/monitoring/docs/index.rst b/monitoring/docs/index.rst deleted file mode 100644 index 49d617e4d0f7..000000000000 --- a/monitoring/docs/index.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. include:: README.rst - -Api Reference -------------- -.. toctree:: - :maxdepth: 2 - - query.rst - gapic/v3/api - gapic/v3/types - -Changelog ---------- - -For a list of all previous ``google-cloud-monitoring`` releases. - -.. toctree:: - :maxdepth: 2 - - changelog \ No newline at end of file diff --git a/monitoring/docs/query.rst b/monitoring/docs/query.rst deleted file mode 100644 index d52fd7f19077..000000000000 --- a/monitoring/docs/query.rst +++ /dev/null @@ -1,6 +0,0 @@ -Time Series Query -================= - -.. 
automodule:: google.cloud.monitoring_v3.query - :members: - :show-inheritance: diff --git a/monitoring/google/__init__.py b/monitoring/google/__init__.py deleted file mode 100644 index 9a1b64a6d586..000000000000 --- a/monitoring/google/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/monitoring/google/cloud/__init__.py b/monitoring/google/cloud/__init__.py deleted file mode 100644 index 9a1b64a6d586..000000000000 --- a/monitoring/google/cloud/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/monitoring/google/cloud/monitoring.py b/monitoring/google/cloud/monitoring.py deleted file mode 100644 index 42c574b1f159..000000000000 --- a/monitoring/google/cloud/monitoring.py +++ /dev/null @@ -1,39 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from __future__ import absolute_import - -from google.cloud.monitoring_v3 import AlertPolicyServiceClient -from google.cloud.monitoring_v3 import GroupServiceClient -from google.cloud.monitoring_v3 import MetricServiceClient -from google.cloud.monitoring_v3 import NotificationChannelServiceClient -from google.cloud.monitoring_v3 import ServiceMonitoringServiceClient -from google.cloud.monitoring_v3 import UptimeCheckServiceClient -from google.cloud.monitoring_v3 import enums -from google.cloud.monitoring_v3 import types - - -__all__ = ( - "enums", - "types", - "AlertPolicyServiceClient", - "GroupServiceClient", - "MetricServiceClient", - "NotificationChannelServiceClient", - "ServiceMonitoringServiceClient", - "UptimeCheckServiceClient", -) diff --git a/monitoring/google/cloud/monitoring_v3/__init__.py b/monitoring/google/cloud/monitoring_v3/__init__.py deleted file mode 100644 index 17105590c8ab..000000000000 --- a/monitoring/google/cloud/monitoring_v3/__init__.py +++ /dev/null @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import -import sys -import warnings - -from google.cloud.monitoring_v3 import types -from google.cloud.monitoring_v3.gapic import alert_policy_service_client -from google.cloud.monitoring_v3.gapic import enums -from google.cloud.monitoring_v3.gapic import group_service_client -from google.cloud.monitoring_v3.gapic import metric_service_client -from google.cloud.monitoring_v3.gapic import ( - notification_channel_service_client as notification_client, -) -from google.cloud.monitoring_v3.gapic import service_monitoring_service_client -from google.cloud.monitoring_v3.gapic import uptime_check_service_client - - -if sys.version_info[:2] == (2, 7): - message = ( - "A future version of this library will drop support for Python 2.7." 
- "More details about Python 2 support for Google Cloud Client Libraries" - "can be found at https://cloud.google.com/python/docs/python2-sunset/" - ) - warnings.warn(message, DeprecationWarning) - - -class AlertPolicyServiceClient(alert_policy_service_client.AlertPolicyServiceClient): - __doc__ = alert_policy_service_client.AlertPolicyServiceClient.__doc__ - enums = enums - - -class GroupServiceClient(group_service_client.GroupServiceClient): - __doc__ = group_service_client.GroupServiceClient.__doc__ - enums = enums - - -class MetricServiceClient(metric_service_client.MetricServiceClient): - __doc__ = metric_service_client.MetricServiceClient.__doc__ - enums = enums - - -class NotificationChannelServiceClient( - notification_client.NotificationChannelServiceClient -): - __doc__ = notification_client.NotificationChannelServiceClient.__doc__ - enums = enums - - -class ServiceMonitoringServiceClient( - service_monitoring_service_client.ServiceMonitoringServiceClient -): - __doc__ = service_monitoring_service_client.ServiceMonitoringServiceClient.__doc__ - enums = enums - - -class UptimeCheckServiceClient(uptime_check_service_client.UptimeCheckServiceClient): - __doc__ = uptime_check_service_client.UptimeCheckServiceClient.__doc__ - enums = enums - - -__all__ = ( - "enums", - "types", - "AlertPolicyServiceClient", - "GroupServiceClient", - "MetricServiceClient", - "NotificationChannelServiceClient", - "ServiceMonitoringServiceClient", - "UptimeCheckServiceClient", -) diff --git a/monitoring/google/cloud/monitoring_v3/_dataframe.py b/monitoring/google/cloud/monitoring_v3/_dataframe.py deleted file mode 100644 index 5bedb39bc356..000000000000 --- a/monitoring/google/cloud/monitoring_v3/_dataframe.py +++ /dev/null @@ -1,147 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Time series as :mod:`pandas` dataframes.""" - -import itertools - -try: - import pandas -except ImportError: # pragma: NO COVER - pandas = None - -from google.cloud.monitoring_v3.types import TimeSeries - -TOP_RESOURCE_LABELS = ("project_id", "aws_account", "location", "region", "zone") - - -def _extract_header(time_series): - """Return a copy of time_series with the points removed.""" - return TimeSeries( - metric=time_series.metric, - resource=time_series.resource, - metric_kind=time_series.metric_kind, - value_type=time_series.value_type, - ) - - -def _extract_labels(time_series): - """Build the combined resource and metric labels, with resource_type.""" - labels = {"resource_type": time_series.resource.type} - labels.update(time_series.resource.labels) - labels.update(time_series.metric.labels) - return labels - - -def _extract_value(typed_value): - """Extract the value from a TypedValue.""" - value_type = typed_value.WhichOneof("value") - return typed_value.__getattribute__(value_type) - - -def _build_dataframe(time_series_iterable, label=None, labels=None): # pragma: NO COVER - """Build a :mod:`pandas` dataframe out of time series. 
- - :type time_series_iterable: - iterable over :class:`~google.cloud.monitoring_v3.types.TimeSeries` - :param time_series_iterable: - An iterable (e.g., a query object) yielding time series. - - :type label: str - :param label: - (Optional) The label name to use for the dataframe header. This can be - the name of a resource label or metric label (e.g., - ``"instance_name"``), or the string ``"resource_type"``. - - :type labels: list of strings, or None - :param labels: - A list or tuple of label names to use for the dataframe header. - If more than one label name is provided, the resulting dataframe - will have a multi-level column header. - - Specifying neither ``label`` or ``labels`` results in a dataframe - with a multi-level column header including the resource type and - all available resource and metric labels. - - Specifying both ``label`` and ``labels`` is an error. - - :rtype: :class:`pandas.DataFrame` - :returns: A dataframe where each column represents one time series. - - :raises: :exc:`RuntimeError` if `pandas` is not installed. - """ - if pandas is None: - raise RuntimeError("This method requires `pandas` to be installed.") - - if label is not None: - if labels: - raise ValueError("Cannot specify both `label` and `labels`.") - labels = (label,) - - columns = [] - headers = [] - for time_series in time_series_iterable: - pandas_series = pandas.Series( - data=[_extract_value(point.value) for point in time_series.points], - index=[ - point.interval.end_time.ToNanoseconds() for point in time_series.points - ], - ) - columns.append(pandas_series) - headers.append(_extract_header(time_series)) - - # Implement a smart default of using all available labels. - if labels is None: - resource_labels = set( - itertools.chain.from_iterable(header.resource.labels for header in headers) - ) - metric_labels = set( - itertools.chain.from_iterable(header.metric.labels for header in headers) - ) - labels = ( - ["resource_type"] - + _sorted_resource_labels(resource_labels) - + sorted(metric_labels) - ) - - # Assemble the columns into a DataFrame. - dataframe = pandas.DataFrame.from_records(columns).T - - # Convert the timestamp strings into a DatetimeIndex. - dataframe.index = pandas.to_datetime(dataframe.index) - - # Build a multi-level stack of column headers. Some labels may - # be undefined for some time series. - levels = [] - for key in labels: - level = [_extract_labels(header).get(key, "") for header in headers] - levels.append(level) - - # Build a column Index or MultiIndex. Do not include level names - # in the column header if the user requested a single-level header - # by specifying "label". - dataframe.columns = pandas.MultiIndex.from_arrays( - levels, names=labels if not label else None - ) - - # Sort the rows just in case (since the API doesn't guarantee the - # ordering), and sort the columns lexicographically. 
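This helper is the plumbing behind the ``as_dataframe`` method of ``google.cloud.monitoring_v3.query.Query`` (the module referenced by ``docs/query.rst`` earlier in this diff). A minimal sketch of the ``label``/``labels`` options described in the docstring above, assuming that era's ``Query`` constructor and a hypothetical project id:

    from google.cloud import monitoring_v3
    from google.cloud.monitoring_v3 import query

    client = monitoring_v3.MetricServiceClient()

    # Hypothetical project; query the last hour of CPU utilization.
    q = query.Query(
        client,
        project="my-project",
        metric_type="compute.googleapis.com/instance/cpu/utilization",
        hours=1,
    )

    # Single-level column header keyed on one label ...
    df_single = q.as_dataframe(label="instance_name")

    # ... or a multi-level header built from several labels, as described above.
    df_multi = q.as_dataframe(labels=["resource_type", "zone", "instance_name"])

Specifying both ``label`` and ``labels``, as noted in the docstring, raises ``ValueError``.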
- return dataframe.sort_index(axis=0).sort_index(axis=1) - - -def _sorted_resource_labels(labels): - """Sort label names, putting well-known resource labels first.""" - head = [label for label in TOP_RESOURCE_LABELS if label in labels] - tail = sorted(label for label in labels if label not in TOP_RESOURCE_LABELS) - return head + tail diff --git a/monitoring/google/cloud/monitoring_v3/gapic/__init__.py b/monitoring/google/cloud/monitoring_v3/gapic/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/monitoring/google/cloud/monitoring_v3/gapic/alert_policy_service_client.py b/monitoring/google/cloud/monitoring_v3/gapic/alert_policy_service_client.py deleted file mode 100644 index cccc98960a30..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/alert_policy_service_client.py +++ /dev/null @@ -1,701 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.monitoring.v3 AlertPolicyService API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.cloud.monitoring_v3.gapic import alert_policy_service_client_config -from google.cloud.monitoring_v3.gapic import enums -from google.cloud.monitoring_v3.gapic.transports import ( - alert_policy_service_grpc_transport, -) -from google.cloud.monitoring_v3.proto import alert_pb2 -from google.cloud.monitoring_v3.proto import alert_service_pb2 -from google.cloud.monitoring_v3.proto import alert_service_pb2_grpc -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-monitoring" -).version - - -class AlertPolicyServiceClient(object): - """ - The AlertPolicyService API is used to manage (list, create, delete, - edit) alert policies in Stackdriver Monitoring. An alerting policy is a - description of the conditions under which some aspect of your system is - considered to be "unhealthy" and the ways to notify people or services - about this state. In addition to using this API, alert policies can also - be managed through `Stackdriver - Monitoring `__, which can be - reached by clicking the "Monitoring" tab in `Cloud - Console `__. - """ - - SERVICE_ADDRESS = "monitoring.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. 
- _INTERFACE_NAME = "google.monitoring.v3.AlertPolicyService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AlertPolicyServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def alert_policy_path(cls, project, alert_policy): - """Return a fully-qualified alert_policy string.""" - return google.api_core.path_template.expand( - "projects/{project}/alertPolicies/{alert_policy}", - project=project, - alert_policy=alert_policy, - ) - - @classmethod - def alert_policy_condition_path(cls, project, alert_policy, condition): - """Return a fully-qualified alert_policy_condition string.""" - return google.api_core.path_template.expand( - "projects/{project}/alertPolicies/{alert_policy}/conditions/{condition}", - project=project, - alert_policy=alert_policy, - condition=condition, - ) - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.AlertPolicyServiceGrpcTransport, - Callable[[~.Credentials, type], ~.AlertPolicyServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. 
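A minimal sketch of the ``client_options`` behavior the constructor docstring above describes, using the service's default endpoint; both the dict form and the ``ClientOptions`` form are accepted by the constructor code that follows:

    from google.api_core.client_options import ClientOptions
    from google.cloud import monitoring_v3

    # Dict form; converted internally via client_options.from_dict().
    client = monitoring_v3.AlertPolicyServiceClient(
        client_options={"api_endpoint": "monitoring.googleapis.com:443"}
    )

    # Equivalent ClientOptions form.
    client = monitoring_v3.AlertPolicyServiceClient(
        client_options=ClientOptions(api_endpoint="monitoring.googleapis.com:443")
    )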
- if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = alert_policy_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=alert_policy_service_grpc_transport.AlertPolicyServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = alert_policy_service_grpc_transport.AlertPolicyServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def list_alert_policies( - self, - name, - filter_=None, - order_by=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the existing alerting policies for the project. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.AlertPolicyServiceClient() - >>> - >>> name = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_alert_policies(name): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_alert_policies(name).pages: - ... for element in page: - ... # process element - ... pass - - Args: - name (str): The project whose alert policies are to be listed. The format is - - projects/[PROJECT_ID] - - Note that this field names the parent container in which the alerting - policies to be listed are stored. To retrieve a single alerting policy - by name, use the ``GetAlertPolicy`` operation, instead. - filter_ (str): If provided, this field specifies the criteria that must be met by alert - policies to be included in the response. - - For more details, see `sorting and - filtering `__. - order_by (str): A comma-separated list of fields by which to sort the result. 
Supports - the same set of field references as the ``filter`` field. Entries can be - prefixed with a minus sign to sort by the field in descending order. - - For more details, see `sorting and - filtering `__. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.monitoring_v3.types.AlertPolicy` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "list_alert_policies" not in self._inner_api_calls: - self._inner_api_calls[ - "list_alert_policies" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_alert_policies, - default_retry=self._method_configs["ListAlertPolicies"].retry, - default_timeout=self._method_configs["ListAlertPolicies"].timeout, - client_info=self._client_info, - ) - - request = alert_service_pb2.ListAlertPoliciesRequest( - name=name, filter=filter_, order_by=order_by, page_size=page_size - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_alert_policies"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="alert_policies", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_alert_policy( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a single alerting policy. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.AlertPolicyServiceClient() - >>> - >>> name = client.alert_policy_path('[PROJECT]', '[ALERT_POLICY]') - >>> - >>> response = client.get_alert_policy(name) - - Args: - name (str): The alerting policy to retrieve. The format is - - projects/[PROJECT_ID]/alertPolicies/[ALERT_POLICY_ID] - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. 
- timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.AlertPolicy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "get_alert_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "get_alert_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_alert_policy, - default_retry=self._method_configs["GetAlertPolicy"].retry, - default_timeout=self._method_configs["GetAlertPolicy"].timeout, - client_info=self._client_info, - ) - - request = alert_service_pb2.GetAlertPolicyRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_alert_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_alert_policy( - self, - name, - alert_policy, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a new alerting policy. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.AlertPolicyServiceClient() - >>> - >>> name = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `alert_policy`: - >>> alert_policy = {} - >>> - >>> response = client.create_alert_policy(name, alert_policy) - - Args: - name (str): The project in which to create the alerting policy. The format is - ``projects/[PROJECT_ID]``. - - Note that this field names the parent container in which the alerting - policy will be written, not the name of the created policy. The alerting - policy that is returned will have a name that contains a normalized - representation of this name as a prefix but adds a suffix of the form - ``/alertPolicies/[POLICY_ID]``, identifying the policy in the container. - alert_policy (Union[dict, ~google.cloud.monitoring_v3.types.AlertPolicy]): The requested alerting policy. You should omit the ``name`` field in - this policy. The name will be returned in the new policy, including a - new [ALERT\_POLICY\_ID] value. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.AlertPolicy` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.AlertPolicy` instance. 
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "create_alert_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "create_alert_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_alert_policy, - default_retry=self._method_configs["CreateAlertPolicy"].retry, - default_timeout=self._method_configs["CreateAlertPolicy"].timeout, - client_info=self._client_info, - ) - - request = alert_service_pb2.CreateAlertPolicyRequest( - name=name, alert_policy=alert_policy - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_alert_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_alert_policy( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes an alerting policy. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.AlertPolicyServiceClient() - >>> - >>> name = client.alert_policy_path('[PROJECT]', '[ALERT_POLICY]') - >>> - >>> client.delete_alert_policy(name) - - Args: - name (str): The alerting policy to delete. The format is: - - projects/[PROJECT_ID]/alertPolicies/[ALERT_POLICY_ID] - - For more information, see ``AlertPolicy``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. 
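The per-call ``retry`` and ``timeout`` arguments documented above are applied by the wrapping step this comment introduces. A small illustrative sketch, with placeholder project and policy ids and an assumed overall deadline:

    from google.api_core import retry as retries
    from google.cloud import monitoring_v3

    client = monitoring_v3.AlertPolicyServiceClient()
    name = client.alert_policy_path("my-project", "my-policy-id")

    # Allow 30 seconds per attempt, retrying for up to 60 seconds overall.
    client.delete_alert_policy(
        name,
        retry=retries.Retry(deadline=60.0),
        timeout=30.0,
    )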
- if "delete_alert_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_alert_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_alert_policy, - default_retry=self._method_configs["DeleteAlertPolicy"].retry, - default_timeout=self._method_configs["DeleteAlertPolicy"].timeout, - client_info=self._client_info, - ) - - request = alert_service_pb2.DeleteAlertPolicyRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_alert_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_alert_policy( - self, - alert_policy, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates an alerting policy. You can either replace the entire policy - with a new one or replace only certain fields in the current alerting - policy by specifying the fields to be updated via ``updateMask``. - Returns the updated alerting policy. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.AlertPolicyServiceClient() - >>> - >>> # TODO: Initialize `alert_policy`: - >>> alert_policy = {} - >>> - >>> response = client.update_alert_policy(alert_policy) - - Args: - alert_policy (Union[dict, ~google.cloud.monitoring_v3.types.AlertPolicy]): Required. The updated alerting policy or the updated values for the - fields listed in ``update_mask``. If ``update_mask`` is not empty, any - fields in this policy that are not in ``update_mask`` are ignored. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.AlertPolicy` - update_mask (Union[dict, ~google.cloud.monitoring_v3.types.FieldMask]): Optional. A list of alerting policy field names. If this field is not - empty, each listed field in the existing alerting policy is set to the - value of the corresponding field in the supplied policy - (``alert_policy``), or to the field's default value if the field is not - in the supplied alerting policy. Fields not listed retain their previous - value. - - Examples of valid field masks include ``display_name``, - ``documentation``, ``documentation.content``, - ``documentation.mime_type``, ``user_labels``, ``user_label.nameofkey``, - ``enabled``, ``conditions``, ``combiner``, etc. - - If this field is empty, then the supplied alerting policy replaces the - existing policy. It is the same as deleting the existing policy and - adding the supplied policy, except for the following: - - - The new policy will have the same ``[ALERT_POLICY_ID]`` as the former - policy. This gives you continuity with the former policy in your - notifications and incidents. - - Conditions in the new policy will keep their former - ``[CONDITION_ID]`` if the supplied condition includes the ``name`` - field with that ``[CONDITION_ID]``. If the supplied condition omits - the ``name`` field, then a new ``[CONDITION_ID]`` is created. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.AlertPolicy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "update_alert_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "update_alert_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_alert_policy, - default_retry=self._method_configs["UpdateAlertPolicy"].retry, - default_timeout=self._method_configs["UpdateAlertPolicy"].timeout, - client_info=self._client_info, - ) - - request = alert_service_pb2.UpdateAlertPolicyRequest( - alert_policy=alert_policy, update_mask=update_mask - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("alert_policy.name", alert_policy.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_alert_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/monitoring/google/cloud/monitoring_v3/gapic/alert_policy_service_client_config.py b/monitoring/google/cloud/monitoring_v3/gapic/alert_policy_service_client_config.py deleted file mode 100644 index 5aed862abe3a..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/alert_policy_service_client_config.py +++ /dev/null @@ -1,48 +0,0 @@ -config = { - "interfaces": { - "google.monitoring.v3.AlertPolicyService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "ListAlertPolicies": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetAlertPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateAlertPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DeleteAlertPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "UpdateAlertPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/monitoring/google/cloud/monitoring_v3/gapic/enums.py b/monitoring/google/cloud/monitoring_v3/gapic/enums.py deleted file mode 100644 index 017d94fd6a42..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/enums.py +++ /dev/null @@ -1,612 +0,0 @@ -# -*- coding: utf-8 
-*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class CalendarPeriod(enum.IntEnum): - """ - A ``CalendarPeriod`` represents the abstract concept of a time period - that has a canonical start. Grammatically, "the start of the current - ``CalendarPeriod``." All calendar times begin at midnight UTC. - - Attributes: - CALENDAR_PERIOD_UNSPECIFIED (int): Undefined period, raises an error. - DAY (int): A day. - WEEK (int): A week. Weeks begin on Monday, following `ISO - 8601 `__. - FORTNIGHT (int): A fortnight. The first calendar fortnight of the year begins at the - start of week 1 according to `ISO - 8601 `__. - MONTH (int): A month. - QUARTER (int): A quarter. Quarters start on dates 1-Jan, 1-Apr, 1-Jul, and 1-Oct of each - year. - HALF (int): A half-year. Half-years start on dates 1-Jan and 1-Jul. - YEAR (int): A year. - """ - - CALENDAR_PERIOD_UNSPECIFIED = 0 - DAY = 1 - WEEK = 2 - FORTNIGHT = 3 - MONTH = 4 - QUARTER = 5 - HALF = 6 - YEAR = 7 - - -class ComparisonType(enum.IntEnum): - """ - Specifies an ordering relationship on two arguments, here called left and - right. - - Attributes: - COMPARISON_UNSPECIFIED (int): No ordering relationship is specified. - COMPARISON_GT (int): The left argument is greater than the right argument. - COMPARISON_GE (int): The left argument is greater than or equal to the right argument. - COMPARISON_LT (int): The left argument is less than the right argument. - COMPARISON_LE (int): The left argument is less than or equal to the right argument. - COMPARISON_EQ (int): The left argument is equal to the right argument. - COMPARISON_NE (int): The left argument is not equal to the right argument. - """ - - COMPARISON_UNSPECIFIED = 0 - COMPARISON_GT = 1 - COMPARISON_GE = 2 - COMPARISON_LT = 3 - COMPARISON_LE = 4 - COMPARISON_EQ = 5 - COMPARISON_NE = 6 - - -class GroupResourceType(enum.IntEnum): - """ - The supported resource types that can be used as values of - ``group_resource.resource_type``. ``INSTANCE`` includes ``gce_instance`` - and ``aws_ec2_instance`` resource types. The resource types ``gae_app`` - and ``uptime_url`` are not valid here because group checks on App Engine - modules and URLs are not allowed. - - Attributes: - RESOURCE_TYPE_UNSPECIFIED (int): Default value (not valid). - INSTANCE (int): A group of instances from Google Cloud Platform (GCP) or - Amazon Web Services (AWS). - AWS_ELB_LOAD_BALANCER (int): A group of Amazon ELB load balancers. - """ - - RESOURCE_TYPE_UNSPECIFIED = 0 - INSTANCE = 1 - AWS_ELB_LOAD_BALANCER = 2 - - -class LaunchStage(enum.IntEnum): - """ - The launch stage as defined by `Google Cloud Platform Launch - Stages `__. - - Attributes: - LAUNCH_STAGE_UNSPECIFIED (int): Do not use this default value. - EARLY_ACCESS (int): Early Access features are limited to a closed group of testers. To use - these features, you must sign up in advance and sign a Trusted Tester - agreement (which includes confidentiality provisions). 
These features may - be unstable, changed in backward-incompatible ways, and are not - guaranteed to be released. - ALPHA (int): Alpha is a limited availability test for releases before they are cleared - for widespread use. By Alpha, all significant design issues are resolved - and we are in the process of verifying functionality. Alpha customers - need to apply for access, agree to applicable terms, and have their - projects whitelisted. Alpha releases don’t have to be feature complete, - no SLAs are provided, and there are no technical support obligations, but - they will be far enough along that customers can actually use them in - test environments or for limited-use tests -- just like they would in - normal production cases. - BETA (int): Beta is the point at which we are ready to open a release for any - customer to use. There are no SLA or technical support obligations in a - Beta release. Products will be complete from a feature perspective, but - may have some open outstanding issues. Beta releases are suitable for - limited production use cases. - GA (int): GA features are open to all developers and are considered stable and - fully qualified for production use. - DEPRECATED (int): Deprecated features are scheduled to be shut down and removed. For more - information, see the “Deprecation Policy” section of our `Terms of - Service `__ and the `Google Cloud - Platform Subject to the Deprecation - Policy `__ documentation. - """ - - LAUNCH_STAGE_UNSPECIFIED = 0 - EARLY_ACCESS = 1 - ALPHA = 2 - BETA = 3 - GA = 4 - DEPRECATED = 5 - - -class NullValue(enum.IntEnum): - """ - ``NullValue`` is a singleton enumeration to represent the null value for - the ``Value`` type union. - - The JSON representation for ``NullValue`` is JSON ``null``. - - Attributes: - NULL_VALUE (int): Null value. - """ - - NULL_VALUE = 0 - - -class ServiceTier(enum.IntEnum): - """ - The tier of service for a Workspace. Please see the `service tiers - documentation `__ - for more details. - - Attributes: - SERVICE_TIER_UNSPECIFIED (int): An invalid sentinel value, used to indicate that a tier has not - been provided explicitly. - SERVICE_TIER_BASIC (int): The Stackdriver Basic tier, a free tier of service that provides basic - features, a moderate allotment of logs, and access to built-in metrics. - A number of features are not available in this tier. For more details, - see `the service tiers - documentation `__. - SERVICE_TIER_PREMIUM (int): The Stackdriver Premium tier, a higher, more expensive tier of service - that provides access to all Stackdriver features, lets you use - Stackdriver with AWS accounts, and has a larger allotments for logs and - metrics. For more details, see `the service tiers - documentation `__. - """ - - SERVICE_TIER_UNSPECIFIED = 0 - SERVICE_TIER_BASIC = 1 - SERVICE_TIER_PREMIUM = 2 - - -class UptimeCheckRegion(enum.IntEnum): - """ - The regions from which an Uptime check can be run. - - Attributes: - REGION_UNSPECIFIED (int): Default value if no region is specified. Will result in Uptime checks - running from all regions. - USA (int): Allows checks to run from locations within the United States of America. - EUROPE (int): Allows checks to run from locations within the continent of Europe. - SOUTH_AMERICA (int): Allows checks to run from locations within the continent of South - America. - ASIA_PACIFIC (int): Allows checks to run from locations within the Asia Pacific area (ex: - Singapore). 
- """ - - REGION_UNSPECIFIED = 0 - USA = 1 - EUROPE = 2 - SOUTH_AMERICA = 3 - ASIA_PACIFIC = 4 - - -class Aggregation(object): - class Aligner(enum.IntEnum): - """ - The Aligner describes how to bring the data points in a single - time series into temporal alignment. - - Attributes: - ALIGN_NONE (int): No alignment. Raw data is returned. Not valid if cross-time - series reduction is requested. The value type of the result is - the same as the value type of the input. - ALIGN_DELTA (int): Align and convert to delta metric type. This alignment is valid for - cumulative metrics and delta metrics. Aligning an existing delta metric - to a delta metric requires that the alignment period be increased. The - value type of the result is the same as the value type of the input. - - One can think of this aligner as a rate but without time units; that is, - the output is conceptually (second\_point - first\_point). - ALIGN_RATE (int): Align and convert to a rate. This alignment is valid for cumulative - metrics and delta metrics with numeric values. The output is a gauge - metric with value type ``DOUBLE``. - - One can think of this aligner as conceptually providing the slope of the - line that passes through the value at the start and end of the window. - In other words, this is conceptually ((y1 - y0)/(t1 - t0)), and the - output unit is one that has a "/time" dimension. - - If, by rate, you are looking for percentage change, see the - ``ALIGN_PERCENT_CHANGE`` aligner option. - ALIGN_INTERPOLATE (int): Align by interpolating between adjacent points around the - period boundary. This alignment is valid for gauge - metrics with numeric values. The value type of the result is the same - as the value type of the input. - ALIGN_NEXT_OLDER (int): Align by shifting the oldest data point before the period - boundary to the boundary. This alignment is valid for gauge - metrics. The value type of the result is the same as the - value type of the input. - ALIGN_MIN (int): Align time series via aggregation. The resulting data point in - the alignment period is the minimum of all data points in the - period. This alignment is valid for gauge and delta metrics with numeric - values. The value type of the result is the same as the value - type of the input. - ALIGN_MAX (int): Align time series via aggregation. The resulting data point in - the alignment period is the maximum of all data points in the - period. This alignment is valid for gauge and delta metrics with numeric - values. The value type of the result is the same as the value - type of the input. - ALIGN_MEAN (int): Align time series via aggregation. The resulting data point in the - alignment period is the average or arithmetic mean of all data points in - the period. This alignment is valid for gauge and delta metrics with - numeric values. The value type of the output is ``DOUBLE``. - ALIGN_COUNT (int): Align time series via aggregation. The resulting data point in the - alignment period is the count of all data points in the period. This - alignment is valid for gauge and delta metrics with numeric or Boolean - values. The value type of the output is ``INT64``. - ALIGN_SUM (int): Align time series via aggregation. The resulting data point in - the alignment period is the sum of all data points in the - period. This alignment is valid for gauge and delta metrics with numeric - and distribution values. The value type of the output is the - same as the value type of the input. - ALIGN_STDDEV (int): Align time series via aggregation. 
The resulting data point in the - alignment period is the standard deviation of all data points in the - period. This alignment is valid for gauge and delta metrics with numeric - values. The value type of the output is ``DOUBLE``. - ALIGN_COUNT_TRUE (int): Align time series via aggregation. The resulting data point in the - alignment period is the count of True-valued data points in the period. - This alignment is valid for gauge metrics with Boolean values. The value - type of the output is ``INT64``. - ALIGN_COUNT_FALSE (int): Align time series via aggregation. The resulting data point in the - alignment period is the count of False-valued data points in the period. - This alignment is valid for gauge metrics with Boolean values. The value - type of the output is ``INT64``. - ALIGN_FRACTION_TRUE (int): Align time series via aggregation. The resulting data point in the - alignment period is the fraction of True-valued data points in the - period. This alignment is valid for gauge metrics with Boolean values. - The output value is in the range [0, 1] and has value type ``DOUBLE``. - ALIGN_PERCENTILE_99 (int): Align time series via aggregation. The resulting data point in the - alignment period is the 99th percentile of all data points in the - period. This alignment is valid for gauge and delta metrics with - distribution values. The output is a gauge metric with value type - ``DOUBLE``. - ALIGN_PERCENTILE_95 (int): Align time series via aggregation. The resulting data point in the - alignment period is the 95th percentile of all data points in the - period. This alignment is valid for gauge and delta metrics with - distribution values. The output is a gauge metric with value type - ``DOUBLE``. - ALIGN_PERCENTILE_50 (int): Align time series via aggregation. The resulting data point in the - alignment period is the 50th percentile of all data points in the - period. This alignment is valid for gauge and delta metrics with - distribution values. The output is a gauge metric with value type - ``DOUBLE``. - ALIGN_PERCENTILE_05 (int): Align time series via aggregation. The resulting data point in the - alignment period is the 5th percentile of all data points in the period. - This alignment is valid for gauge and delta metrics with distribution - values. The output is a gauge metric with value type ``DOUBLE``. - ALIGN_PERCENT_CHANGE (int): Align and convert to a percentage change. This alignment is valid for - gauge and delta metrics with numeric values. This alignment conceptually - computes the equivalent of "((current - previous)/previous)\*100" where - previous value is determined based on the alignmentPeriod. In the event - that previous is 0 the calculated value is infinity with the exception - that if both (current - previous) and previous are 0 the calculated - value is 0. A 10 minute moving mean is computed at each point of the - time window prior to the above calculation to smooth the metric and - prevent false positives from very short lived spikes. Only applicable - for data that is >= 0. Any values < 0 are treated as no data. While - delta metrics are accepted by this alignment special care should be - taken that the values for the metric will always be positive. The output - is a gauge metric with value type ``DOUBLE``. 
- """ - - ALIGN_NONE = 0 - ALIGN_DELTA = 1 - ALIGN_RATE = 2 - ALIGN_INTERPOLATE = 3 - ALIGN_NEXT_OLDER = 4 - ALIGN_MIN = 10 - ALIGN_MAX = 11 - ALIGN_MEAN = 12 - ALIGN_COUNT = 13 - ALIGN_SUM = 14 - ALIGN_STDDEV = 15 - ALIGN_COUNT_TRUE = 16 - ALIGN_COUNT_FALSE = 24 - ALIGN_FRACTION_TRUE = 17 - ALIGN_PERCENTILE_99 = 18 - ALIGN_PERCENTILE_95 = 19 - ALIGN_PERCENTILE_50 = 20 - ALIGN_PERCENTILE_05 = 21 - ALIGN_PERCENT_CHANGE = 23 - - class Reducer(enum.IntEnum): - """ - A Reducer describes how to aggregate data points from multiple - time series into a single time series. - - Attributes: - REDUCE_NONE (int): No cross-time series reduction. The output of the aligner is - returned. - REDUCE_MEAN (int): Reduce by computing the mean across time series for each alignment - period. This reducer is valid for delta and gauge metrics with numeric - or distribution values. The value type of the output is ``DOUBLE``. - REDUCE_MIN (int): Reduce by computing the minimum across time series for each - alignment period. This reducer is valid for delta and - gauge metrics with numeric values. The value type of the output - is the same as the value type of the input. - REDUCE_MAX (int): Reduce by computing the maximum across time series for each - alignment period. This reducer is valid for delta and - gauge metrics with numeric values. The value type of the output - is the same as the value type of the input. - REDUCE_SUM (int): Reduce by computing the sum across time series for each - alignment period. This reducer is valid for delta and - gauge metrics with numeric and distribution values. The value type of - the output is the same as the value type of the input. - REDUCE_STDDEV (int): Reduce by computing the standard deviation across time series for each - alignment period. This reducer is valid for delta and gauge metrics with - numeric or distribution values. The value type of the output is - ``DOUBLE``. - REDUCE_COUNT (int): Reduce by computing the count of data points across time series for each - alignment period. This reducer is valid for delta and gauge metrics of - numeric, Boolean, distribution, and string value type. The value type of - the output is ``INT64``. - REDUCE_COUNT_TRUE (int): Reduce by computing the count of True-valued data points across time - series for each alignment period. This reducer is valid for delta and - gauge metrics of Boolean value type. The value type of the output is - ``INT64``. - REDUCE_COUNT_FALSE (int): Reduce by computing the count of False-valued data points across time - series for each alignment period. This reducer is valid for delta and - gauge metrics of Boolean value type. The value type of the output is - ``INT64``. - REDUCE_FRACTION_TRUE (int): Reduce by computing the fraction of True-valued data points across time - series for each alignment period. This reducer is valid for delta and - gauge metrics of Boolean value type. The output value is in the range - [0, 1] and has value type ``DOUBLE``. - REDUCE_PERCENTILE_99 (int): Reduce by computing 99th percentile of data points across time series - for each alignment period. This reducer is valid for gauge and delta - metrics of numeric and distribution type. The value of the output is - ``DOUBLE`` - REDUCE_PERCENTILE_95 (int): Reduce by computing 95th percentile of data points across time series - for each alignment period. This reducer is valid for gauge and delta - metrics of numeric and distribution type. 
The value of the output is - ``DOUBLE`` - REDUCE_PERCENTILE_50 (int): Reduce by computing 50th percentile of data points across time series - for each alignment period. This reducer is valid for gauge and delta - metrics of numeric and distribution type. The value of the output is - ``DOUBLE`` - REDUCE_PERCENTILE_05 (int): Reduce by computing 5th percentile of data points across time series for - each alignment period. This reducer is valid for gauge and delta metrics - of numeric and distribution type. The value of the output is ``DOUBLE`` - """ - - REDUCE_NONE = 0 - REDUCE_MEAN = 1 - REDUCE_MIN = 2 - REDUCE_MAX = 3 - REDUCE_SUM = 4 - REDUCE_STDDEV = 5 - REDUCE_COUNT = 6 - REDUCE_COUNT_TRUE = 7 - REDUCE_COUNT_FALSE = 15 - REDUCE_FRACTION_TRUE = 8 - REDUCE_PERCENTILE_99 = 9 - REDUCE_PERCENTILE_95 = 10 - REDUCE_PERCENTILE_50 = 11 - REDUCE_PERCENTILE_05 = 12 - - -class AlertPolicy(object): - class ConditionCombinerType(enum.IntEnum): - """ - Operators for combining conditions. - - Attributes: - COMBINE_UNSPECIFIED (int): An unspecified combiner. - AND (int): Combine conditions using the logical ``AND`` operator. An incident is - created only if all conditions are met simultaneously. This combiner is - satisfied if all conditions are met, even if they are met on completely - different resources. - OR (int): Combine conditions using the logical ``OR`` operator. An incident is - created if any of the listed conditions is met. - AND_WITH_MATCHING_RESOURCE (int): Combine conditions using logical ``AND`` operator, but unlike the - regular ``AND`` option, an incident is created only if all conditions - are met simultaneously on at least one resource. - """ - - COMBINE_UNSPECIFIED = 0 - AND = 1 - OR = 2 - AND_WITH_MATCHING_RESOURCE = 3 - - -class InternalChecker(object): - class State(enum.IntEnum): - """ - Operational states for an internal checker. - - Attributes: - UNSPECIFIED (int): An internal checker should never be in the unspecified state. - CREATING (int): The checker is being created, provisioned, and configured. A checker in - this state can be returned by ``ListInternalCheckers`` or - ``GetInternalChecker``, as well as by examining the `long running - Operation `__ - that created it. - RUNNING (int): The checker is running and available for use. A checker in this state - can be returned by ``ListInternalCheckers`` or ``GetInternalChecker`` as - well as by examining the `long running - Operation `__ - that created it. If a checker is being torn down, it is neither visible - nor usable, so there is no "deleting" or "down" state. - """ - - UNSPECIFIED = 0 - CREATING = 1 - RUNNING = 2 - - -class LabelDescriptor(object): - class ValueType(enum.IntEnum): - """ - Value types that can be used as label values. - - Attributes: - STRING (int): A variable-length string. This is the default. - BOOL (int): Boolean; true or false. - INT64 (int): A 64-bit signed integer. - """ - - STRING = 0 - BOOL = 1 - INT64 = 2 - - -class ListTimeSeriesRequest(object): - class TimeSeriesView(enum.IntEnum): - """ - Controls which fields are returned by ``ListTimeSeries``. - - Attributes: - FULL (int): Returns the identity of the metric(s), the time series, - and the time series data. - HEADERS (int): Returns the identity of the metric and the time series resource, - but not the time series data. - """ - - FULL = 0 - HEADERS = 1 - - -class MetricDescriptor(object): - class MetricKind(enum.IntEnum): - """ - The kind of measurement. It describes how the data is reported. 
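A minimal usage sketch of how the ``Aligner``, ``Reducer``, and ``TimeSeriesView`` values defined above fit together in a ``list_time_series`` call, assuming the pre-2.0 ``monitoring_v3`` client surface shown elsewhere in this patch; the project ID, metric filter, alignment period, and grouping label are placeholders::

    import time

    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()
    name = client.project_path("my-project")  # placeholder project ID

    # Query the last hour of data.
    interval = monitoring_v3.types.TimeInterval()
    now = int(time.time())
    interval.end_time.seconds = now
    interval.start_time.seconds = now - 3600

    # Align each series to 5-minute means, then reduce across series per zone.
    aggregation = monitoring_v3.types.Aggregation()
    aggregation.alignment_period.seconds = 300
    aggregation.per_series_aligner = monitoring_v3.enums.Aggregation.Aligner.ALIGN_MEAN
    aggregation.cross_series_reducer = monitoring_v3.enums.Aggregation.Reducer.REDUCE_MEAN
    aggregation.group_by_fields.append("resource.label.zone")  # placeholder grouping label

    series = client.list_time_series(
        name,
        'metric.type = "compute.googleapis.com/instance/cpu/utilization"',  # placeholder filter
        interval,
        monitoring_v3.enums.ListTimeSeriesRequest.TimeSeriesView.FULL,
        aggregation=aggregation,
    )
    for ts in series:
        print(ts.metric.type, len(ts.points))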
- - Attributes: - METRIC_KIND_UNSPECIFIED (int): Do not use this default value. - GAUGE (int): An instantaneous measurement of a value. - DELTA (int): The change in a value during a time interval. - CUMULATIVE (int): A value accumulated over a time interval. Cumulative - measurements in a time series should have the same start time - and increasing end times, until an event resets the cumulative - value to zero and sets a new start time for the following - points. - """ - - METRIC_KIND_UNSPECIFIED = 0 - GAUGE = 1 - DELTA = 2 - CUMULATIVE = 3 - - class ValueType(enum.IntEnum): - """ - The value type of a metric. - - Attributes: - VALUE_TYPE_UNSPECIFIED (int): Do not use this default value. - BOOL (int): The value is a boolean. This value type can be used only if the metric - kind is ``GAUGE``. - INT64 (int): The value is a signed 64-bit integer. - DOUBLE (int): The value is a double precision floating point number. - STRING (int): The value is a text string. This value type can be used only if the - metric kind is ``GAUGE``. - DISTRIBUTION (int): The value is a ``Distribution``. - MONEY (int): The value is money. - """ - - VALUE_TYPE_UNSPECIFIED = 0 - BOOL = 1 - INT64 = 2 - DOUBLE = 3 - STRING = 4 - DISTRIBUTION = 5 - MONEY = 6 - - -class NotificationChannel(object): - class VerificationStatus(enum.IntEnum): - """ - Indicates whether the channel has been verified or not. It is illegal to - specify this field in a ``CreateNotificationChannel`` or an - ``UpdateNotificationChannel`` operation. - - Attributes: - VERIFICATION_STATUS_UNSPECIFIED (int): Sentinel value used to indicate that the state is unknown, omitted, or - is not applicable (as in the case of channels that neither support - nor require verification in order to function). - UNVERIFIED (int): The channel has yet to be verified and requires verification to function. - Note that this state also applies to the case where the verification - process has been initiated by sending a verification code but where - the verification code has not been submitted to complete the process. - VERIFIED (int): It has been proven that notifications can be received on this - notification channel and that someone on the project has access - to messages that are delivered to that channel. - """ - - VERIFICATION_STATUS_UNSPECIFIED = 0 - UNVERIFIED = 1 - VERIFIED = 2 - - -class ServiceLevelObjective(object): - class View(enum.IntEnum): - """ - ``ServiceLevelObjective.View`` determines what form of - ``ServiceLevelObjective`` is returned from ``GetServiceLevelObjective``, - ``ListServiceLevelObjectives``, and - ``ListServiceLevelObjectiveVersions`` RPCs. - - Attributes: - VIEW_UNSPECIFIED (int): Same as FULL. - FULL (int): Return the embedded ``ServiceLevelIndicator`` in the form in which it - was defined. If it was defined using a ``BasicSli``, return that - ``BasicSli``. - EXPLICIT (int): For ``ServiceLevelIndicator``\ s using ``BasicSli`` articulation, - instead return the ``ServiceLevelIndicator`` with its mode of - computation fully spelled out as a ``RequestBasedSli``. For - ``ServiceLevelIndicator``\ s using ``RequestBasedSli`` or - ``WindowsBasedSli``, return the ``ServiceLevelIndicator`` as it was - provided. - """ - - VIEW_UNSPECIFIED = 0 - FULL = 2 - EXPLICIT = 1 - - -class UptimeCheckConfig(object): - class ContentMatcher(object): - class ContentMatcherOption(enum.IntEnum): - """ - Options to perform content matching. 
- - Attributes: - CONTENT_MATCHER_OPTION_UNSPECIFIED (int): No content matcher type specified (maintained for backward - compatibility, but deprecated for future use). Treated as - ``CONTAINS_STRING``. - CONTAINS_STRING (int): Selects substring matching. The match succeeds if the output contains - the ``content`` string. This is the default value for checks without a - ``matcher`` option, or where the value of ``matcher`` is - ``CONTENT_MATCHER_OPTION_UNSPECIFIED``. - NOT_CONTAINS_STRING (int): Selects negation of substring matching. The match succeeds if the output - does *NOT* contain the ``content`` string. - MATCHES_REGEX (int): Selects regular-expression matching. The match succeeds of the output - matches the regular expression specified in the ``content`` string. - NOT_MATCHES_REGEX (int): Selects negation of regular-expression matching. The match succeeds if - the output does *NOT* match the regular expression specified in the - ``content`` string. - """ - - CONTENT_MATCHER_OPTION_UNSPECIFIED = 0 - CONTAINS_STRING = 1 - NOT_CONTAINS_STRING = 2 - MATCHES_REGEX = 3 - NOT_MATCHES_REGEX = 4 diff --git a/monitoring/google/cloud/monitoring_v3/gapic/group_service_client.py b/monitoring/google/cloud/monitoring_v3/gapic/group_service_client.py deleted file mode 100644 index 346c71820c92..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/group_service_client.py +++ /dev/null @@ -1,797 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.monitoring.v3 GroupService API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import google.api_core.protobuf_helpers -import grpc - -from google.cloud.monitoring_v3.gapic import enums -from google.cloud.monitoring_v3.gapic import group_service_client_config -from google.cloud.monitoring_v3.gapic.transports import group_service_grpc_transport -from google.cloud.monitoring_v3.proto import alert_pb2 -from google.cloud.monitoring_v3.proto import alert_service_pb2 -from google.cloud.monitoring_v3.proto import alert_service_pb2_grpc -from google.cloud.monitoring_v3.proto import common_pb2 -from google.cloud.monitoring_v3.proto import group_pb2 -from google.cloud.monitoring_v3.proto import group_service_pb2 -from google.cloud.monitoring_v3.proto import group_service_pb2_grpc -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-monitoring" -).version - - -class GroupServiceClient(object): - """ - The Group API lets you inspect and manage your - `groups <#google.monitoring.v3.Group>`__. 
- - A group is a named filter that is used to identify a collection of - monitored resources. Groups are typically used to mirror the physical - and/or logical topology of the environment. Because group membership is - computed dynamically, monitored resources that are started in the future - are automatically placed in matching groups. By using a group to name - monitored resources in, for example, an alert policy, the target of that - alert policy is updated automatically as monitored resources are added - and removed from the infrastructure. - """ - - SERVICE_ADDRESS = "monitoring.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.monitoring.v3.GroupService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - GroupServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def group_path(cls, project, group): - """Return a fully-qualified group string.""" - return google.api_core.path_template.expand( - "projects/{project}/groups/{group}", project=project, group=group - ) - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.GroupServiceGrpcTransport, - Callable[[~.Credentials, type], ~.GroupServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = group_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=group_service_grpc_transport.GroupServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = group_service_grpc_transport.GroupServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def list_groups( - self, - name, - children_of_group=None, - ancestors_of_group=None, - descendants_of_group=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the existing groups. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.GroupServiceClient() - >>> - >>> name = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_groups(name): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_groups(name).pages: - ... for element in page: - ... # process element - ... pass - - Args: - name (str): The project whose groups are to be listed. The format is - ``"projects/{project_id_or_number}"``. - children_of_group (str): A group name: ``"projects/{project_id_or_number}/groups/{group_id}"``. - Returns groups whose ``parentName`` field contains the group name. If no - groups have this parent, the results are empty. 
- ancestors_of_group (str): A group name: ``"projects/{project_id_or_number}/groups/{group_id}"``. - Returns groups that are ancestors of the specified group. The groups are - returned in order, starting with the immediate parent and ending with - the most distant ancestor. If the specified group has no immediate - parent, the results are empty. - descendants_of_group (str): A group name: ``"projects/{project_id_or_number}/groups/{group_id}"``. - Returns the descendants of the specified group. This is a superset of - the results returned by the ``childrenOfGroup`` filter, and includes - children-of-children, and so forth. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.monitoring_v3.types.Group` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "list_groups" not in self._inner_api_calls: - self._inner_api_calls[ - "list_groups" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_groups, - default_retry=self._method_configs["ListGroups"].retry, - default_timeout=self._method_configs["ListGroups"].timeout, - client_info=self._client_info, - ) - - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. 
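As a hedged illustration of the mutually exclusive filters checked just below (project and group IDs are placeholders), at most one of ``children_of_group``, ``ancestors_of_group``, or ``descendants_of_group`` may be supplied per ``list_groups`` call::

    from google.cloud import monitoring_v3

    client = monitoring_v3.GroupServiceClient()
    project = client.project_path("my-project")            # placeholder project ID
    parent = client.group_path("my-project", "my-group")   # placeholder group ID

    # OK: a single filter argument.
    for group in client.list_groups(project, children_of_group=parent):
        print(group.name, group.filter)

    # Raises ValueError: more than one of the mutually exclusive filters is set.
    # client.list_groups(project, children_of_group=parent, descendants_of_group=parent)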
- google.api_core.protobuf_helpers.check_oneof( - children_of_group=children_of_group, - ancestors_of_group=ancestors_of_group, - descendants_of_group=descendants_of_group, - ) - - request = group_service_pb2.ListGroupsRequest( - name=name, - children_of_group=children_of_group, - ancestors_of_group=ancestors_of_group, - descendants_of_group=descendants_of_group, - page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_groups"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="group", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_group( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a single group. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.GroupServiceClient() - >>> - >>> name = client.group_path('[PROJECT]', '[GROUP]') - >>> - >>> response = client.get_group(name) - - Args: - name (str): The group to retrieve. The format is - ``"projects/{project_id_or_number}/groups/{group_id}"``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.Group` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "get_group" not in self._inner_api_calls: - self._inner_api_calls[ - "get_group" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_group, - default_retry=self._method_configs["GetGroup"].retry, - default_timeout=self._method_configs["GetGroup"].timeout, - client_info=self._client_info, - ) - - request = group_service_pb2.GetGroupRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_group"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_group( - self, - name, - group, - validate_only=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a new group. 
- - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.GroupServiceClient() - >>> - >>> name = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `group`: - >>> group = {} - >>> - >>> response = client.create_group(name, group) - - Args: - name (str): The project in which to create the group. The format is - ``"projects/{project_id_or_number}"``. - group (Union[dict, ~google.cloud.monitoring_v3.types.Group]): A group definition. It is an error to define the ``name`` field because - the system assigns the name. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.Group` - validate_only (bool): If true, validate this request but do not create the group. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.Group` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "create_group" not in self._inner_api_calls: - self._inner_api_calls[ - "create_group" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_group, - default_retry=self._method_configs["CreateGroup"].retry, - default_timeout=self._method_configs["CreateGroup"].timeout, - client_info=self._client_info, - ) - - request = group_service_pb2.CreateGroupRequest( - name=name, group=group, validate_only=validate_only - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_group"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_group( - self, - group, - validate_only=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates an existing group. You can change any group attributes except - ``name``. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.GroupServiceClient() - >>> - >>> # TODO: Initialize `group`: - >>> group = {} - >>> - >>> response = client.update_group(group) - - Args: - group (Union[dict, ~google.cloud.monitoring_v3.types.Group]): The new definition of the group. All fields of the existing group, - excepting ``name``, are replaced with the corresponding fields of this - group. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.Group` - validate_only (bool): If true, validate this request but do not update the existing group. 
- retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.Group` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "update_group" not in self._inner_api_calls: - self._inner_api_calls[ - "update_group" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_group, - default_retry=self._method_configs["UpdateGroup"].retry, - default_timeout=self._method_configs["UpdateGroup"].timeout, - client_info=self._client_info, - ) - - request = group_service_pb2.UpdateGroupRequest( - group=group, validate_only=validate_only - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("group.name", group.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_group"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_group( - self, - name, - recursive=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes an existing group. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.GroupServiceClient() - >>> - >>> name = client.group_path('[PROJECT]', '[GROUP]') - >>> - >>> client.delete_group(name) - - Args: - name (str): The group to delete. The format is - ``"projects/{project_id_or_number}/groups/{group_id}"``. - recursive (bool): If this field is true, then the request means to delete a group with all - its descendants. Otherwise, the request means to delete a group only when - it has no descendants. The default value is false. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. 
- if "delete_group" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_group" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_group, - default_retry=self._method_configs["DeleteGroup"].retry, - default_timeout=self._method_configs["DeleteGroup"].timeout, - client_info=self._client_info, - ) - - request = group_service_pb2.DeleteGroupRequest(name=name, recursive=recursive) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_group"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_group_members( - self, - name, - page_size=None, - filter_=None, - interval=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the monitored resources that are members of a group. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.GroupServiceClient() - >>> - >>> name = client.group_path('[PROJECT]', '[GROUP]') - >>> - >>> # Iterate over all results - >>> for element in client.list_group_members(name): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_group_members(name).pages: - ... for element in page: - ... # process element - ... pass - - Args: - name (str): The group whose members are listed. The format is - ``"projects/{project_id_or_number}/groups/{group_id}"``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - filter_ (str): An optional `list - filter `__ - describing the members to be returned. The filter may reference the - type, labels, and metadata of monitored resources that comprise the - group. For example, to return only resources representing Compute Engine - VM instances, use this filter: - - :: - - resource.type = "gce_instance" - interval (Union[dict, ~google.cloud.monitoring_v3.types.TimeInterval]): An optional time interval for which results should be returned. Only - members that were part of the group during the specified interval are - included in the response. If no interval is provided then the group - membership over the last minute is returned. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.TimeInterval` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.monitoring_v3.types.MonitoredResource` instances. 
- You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "list_group_members" not in self._inner_api_calls: - self._inner_api_calls[ - "list_group_members" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_group_members, - default_retry=self._method_configs["ListGroupMembers"].retry, - default_timeout=self._method_configs["ListGroupMembers"].timeout, - client_info=self._client_info, - ) - - request = group_service_pb2.ListGroupMembersRequest( - name=name, page_size=page_size, filter=filter_, interval=interval - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_group_members"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="members", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator diff --git a/monitoring/google/cloud/monitoring_v3/gapic/group_service_client_config.py b/monitoring/google/cloud/monitoring_v3/gapic/group_service_client_config.py deleted file mode 100644 index 786ec6bab83c..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/group_service_client_config.py +++ /dev/null @@ -1,53 +0,0 @@ -config = { - "interfaces": { - "google.monitoring.v3.GroupService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "ListGroups": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetGroup": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateGroup": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateGroup": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteGroup": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListGroupMembers": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/monitoring/google/cloud/monitoring_v3/gapic/metric_service_client.py b/monitoring/google/cloud/monitoring_v3/gapic/metric_service_client.py deleted file mode 100644 index 19ddd05c5c81..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/metric_service_client.py +++ /dev/null @@ -1,1025 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# 
Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.monitoring.v3 MetricService API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.api import metric_pb2 as api_metric_pb2 -from google.api import monitored_resource_pb2 -from google.cloud.monitoring_v3.gapic import enums -from google.cloud.monitoring_v3.gapic import metric_service_client_config -from google.cloud.monitoring_v3.gapic.transports import metric_service_grpc_transport -from google.cloud.monitoring_v3.proto import alert_pb2 -from google.cloud.monitoring_v3.proto import alert_service_pb2 -from google.cloud.monitoring_v3.proto import alert_service_pb2_grpc -from google.cloud.monitoring_v3.proto import common_pb2 -from google.cloud.monitoring_v3.proto import group_pb2 -from google.cloud.monitoring_v3.proto import group_service_pb2 -from google.cloud.monitoring_v3.proto import group_service_pb2_grpc -from google.cloud.monitoring_v3.proto import metric_pb2 as proto_metric_pb2 -from google.cloud.monitoring_v3.proto import metric_service_pb2 -from google.cloud.monitoring_v3.proto import metric_service_pb2_grpc -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-monitoring" -).version - - -class MetricServiceClient(object): - """ - Manages metric descriptors, monitored resource descriptors, and - time series data. - """ - - SERVICE_ADDRESS = "monitoring.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.monitoring.v3.MetricService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricServiceClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def metric_descriptor_path(cls, project, metric_descriptor): - """Return a fully-qualified metric_descriptor string.""" - return google.api_core.path_template.expand( - "projects/{project}/metricDescriptors/{metric_descriptor=**}", - project=project, - metric_descriptor=metric_descriptor, - ) - - @classmethod - def monitored_resource_descriptor_path(cls, project, monitored_resource_descriptor): - """Return a fully-qualified monitored_resource_descriptor string.""" - return google.api_core.path_template.expand( - "projects/{project}/monitoredResourceDescriptors/{monitored_resource_descriptor}", - project=project, - monitored_resource_descriptor=monitored_resource_descriptor, - ) - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.MetricServiceGrpcTransport, - Callable[[~.Credentials, type], ~.MetricServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. 
- if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = metric_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=metric_service_grpc_transport.MetricServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = metric_service_grpc_transport.MetricServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def list_monitored_resource_descriptors( - self, - name, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists monitored resource descriptors that match a filter. This method does not require a Stackdriver account. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.MetricServiceClient() - >>> - >>> name = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_monitored_resource_descriptors(name): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_monitored_resource_descriptors(name).pages: - ... for element in page: - ... # process element - ... pass - - Args: - name (str): The project on which to execute the request. The format is - ``"projects/{project_id_or_number}"``. - filter_ (str): An optional - `filter `__ - describing the descriptors to be returned. The filter can reference the - descriptor's type and labels. For example, the following filter returns - only Google Compute Engine descriptors that have an ``id`` label: - - :: - - resource.type = starts_with("gce_") AND resource.label:id - page_size (int): The maximum number of resources contained in the - underlying API response. 
If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.monitoring_v3.types.MonitoredResourceDescriptor` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "list_monitored_resource_descriptors" not in self._inner_api_calls: - self._inner_api_calls[ - "list_monitored_resource_descriptors" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_monitored_resource_descriptors, - default_retry=self._method_configs[ - "ListMonitoredResourceDescriptors" - ].retry, - default_timeout=self._method_configs[ - "ListMonitoredResourceDescriptors" - ].timeout, - client_info=self._client_info, - ) - - request = metric_service_pb2.ListMonitoredResourceDescriptorsRequest( - name=name, filter=filter_, page_size=page_size - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_monitored_resource_descriptors"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="resource_descriptors", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_monitored_resource_descriptor( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a single monitored resource descriptor. This method does not require a Stackdriver account. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.MetricServiceClient() - >>> - >>> name = client.monitored_resource_descriptor_path('[PROJECT]', '[MONITORED_RESOURCE_DESCRIPTOR]') - >>> - >>> response = client.get_monitored_resource_descriptor(name) - - Args: - name (str): The monitored resource descriptor to get. The format is - ``"projects/{project_id_or_number}/monitoredResourceDescriptors/{resource_type}"``. - The ``{resource_type}`` is a predefined type, such as - ``cloudsql_database``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
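A short usage sketch for the two monitored-resource-descriptor calls documented above (listing with the optional filter_, and fetching one descriptor by name); the project ID, resource type, and filter string are illustrative only.

from google.cloud import monitoring_v3

client = monitoring_v3.MetricServiceClient()
project_name = client.project_path("my-project")  # placeholder

# List only Compute Engine resource types, following the filter example above.
for descriptor in client.list_monitored_resource_descriptors(
    project_name, filter_='resource.type = starts_with("gce_")'
):
    print(descriptor.type)

# Fetch a single descriptor by its fully-qualified name.
name = client.monitored_resource_descriptor_path("my-project", "gce_instance")
descriptor = client.get_monitored_resource_descriptor(name)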
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.MonitoredResourceDescriptor` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "get_monitored_resource_descriptor" not in self._inner_api_calls: - self._inner_api_calls[ - "get_monitored_resource_descriptor" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_monitored_resource_descriptor, - default_retry=self._method_configs[ - "GetMonitoredResourceDescriptor" - ].retry, - default_timeout=self._method_configs[ - "GetMonitoredResourceDescriptor" - ].timeout, - client_info=self._client_info, - ) - - request = metric_service_pb2.GetMonitoredResourceDescriptorRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_monitored_resource_descriptor"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_metric_descriptors( - self, - name, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists metric descriptors that match a filter. This method does not require a Stackdriver account. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.MetricServiceClient() - >>> - >>> name = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_metric_descriptors(name): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_metric_descriptors(name).pages: - ... for element in page: - ... # process element - ... pass - - Args: - name (str): The project on which to execute the request. The format is - ``"projects/{project_id_or_number}"``. - filter_ (str): If this field is empty, all custom and system-defined metric descriptors - are returned. Otherwise, the - `filter `__ - specifies which metric descriptors are to be returned. For example, the - following filter matches all `custom - metrics `__: - - :: - - metric.type = starts_with("custom.googleapis.com/") - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.monitoring_v3.types.MetricDescriptor` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "list_metric_descriptors" not in self._inner_api_calls: - self._inner_api_calls[ - "list_metric_descriptors" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_metric_descriptors, - default_retry=self._method_configs["ListMetricDescriptors"].retry, - default_timeout=self._method_configs["ListMetricDescriptors"].timeout, - client_info=self._client_info, - ) - - request = metric_service_pb2.ListMetricDescriptorsRequest( - name=name, filter=filter_, page_size=page_size - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_metric_descriptors"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="metric_descriptors", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_metric_descriptor( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a single metric descriptor. This method does not require a Stackdriver account. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.MetricServiceClient() - >>> - >>> name = client.metric_descriptor_path('[PROJECT]', '[METRIC_DESCRIPTOR]') - >>> - >>> response = client.get_metric_descriptor(name) - - Args: - name (str): The metric descriptor on which to execute the request. The format is - ``"projects/{project_id_or_number}/metricDescriptors/{metric_id}"``. An - example value of ``{metric_id}`` is - ``"compute.googleapis.com/instance/disk/read_bytes_count"``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.MetricDescriptor` instance. 
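Building on the metric-descriptor docstrings above, a sketch that lists only user-defined custom metrics (using the filter shown in the list_metric_descriptors documentation) and then fetches one descriptor directly; the project ID is a placeholder.

from google.cloud import monitoring_v3

client = monitoring_v3.MetricServiceClient()
project_name = client.project_path("my-project")  # placeholder

# This filter matches all custom metrics, per the docstring above.
for descriptor in client.list_metric_descriptors(
    project_name, filter_='metric.type = starts_with("custom.googleapis.com/")'
):
    print(descriptor.type)

# A single descriptor can also be fetched by metric type (example from the docstring).
name = client.metric_descriptor_path(
    "my-project", "compute.googleapis.com/instance/disk/read_bytes_count"
)
descriptor = client.get_metric_descriptor(name)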
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "get_metric_descriptor" not in self._inner_api_calls: - self._inner_api_calls[ - "get_metric_descriptor" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_metric_descriptor, - default_retry=self._method_configs["GetMetricDescriptor"].retry, - default_timeout=self._method_configs["GetMetricDescriptor"].timeout, - client_info=self._client_info, - ) - - request = metric_service_pb2.GetMetricDescriptorRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_metric_descriptor"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_metric_descriptor( - self, - name, - metric_descriptor, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a new metric descriptor. User-created metric descriptors define - `custom metrics `__. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.MetricServiceClient() - >>> - >>> name = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `metric_descriptor`: - >>> metric_descriptor = {} - >>> - >>> response = client.create_metric_descriptor(name, metric_descriptor) - - Args: - name (str): The project on which to execute the request. The format is - ``"projects/{project_id_or_number}"``. - metric_descriptor (Union[dict, ~google.cloud.monitoring_v3.types.MetricDescriptor]): The new `custom - metric `__ - descriptor. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.MetricDescriptor` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.MetricDescriptor` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. 
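As a worked example of the create_metric_descriptor call documented above, the sketch below defines a simple gauge-valued custom metric; the metric type and description are placeholders, and it relies on the 1.x types/enums modules that this file imports.

from google.cloud import monitoring_v3
from google.cloud.monitoring_v3 import enums

client = monitoring_v3.MetricServiceClient()
project_name = client.project_path("my-project")  # placeholder

# Build the descriptor message; a dict of the same shape would also be accepted.
descriptor = monitoring_v3.types.MetricDescriptor()
descriptor.type = "custom.googleapis.com/my_test_metric"  # placeholder metric type
descriptor.metric_kind = enums.MetricDescriptor.MetricKind.GAUGE
descriptor.value_type = enums.MetricDescriptor.ValueType.DOUBLE
descriptor.description = "An example gauge metric."

descriptor = client.create_metric_descriptor(project_name, descriptor)
print("Created", descriptor.name)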
- if "create_metric_descriptor" not in self._inner_api_calls: - self._inner_api_calls[ - "create_metric_descriptor" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_metric_descriptor, - default_retry=self._method_configs["CreateMetricDescriptor"].retry, - default_timeout=self._method_configs["CreateMetricDescriptor"].timeout, - client_info=self._client_info, - ) - - request = metric_service_pb2.CreateMetricDescriptorRequest( - name=name, metric_descriptor=metric_descriptor - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_metric_descriptor"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_metric_descriptor( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a metric descriptor. Only user-created `custom - metrics `__ can be - deleted. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.MetricServiceClient() - >>> - >>> name = client.metric_descriptor_path('[PROJECT]', '[METRIC_DESCRIPTOR]') - >>> - >>> client.delete_metric_descriptor(name) - - Args: - name (str): The metric descriptor on which to execute the request. The format is - ``"projects/{project_id_or_number}/metricDescriptors/{metric_id}"``. An - example of ``{metric_id}`` is: - ``"custom.googleapis.com/my_test_metric"``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. 
- if "delete_metric_descriptor" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_metric_descriptor" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_metric_descriptor, - default_retry=self._method_configs["DeleteMetricDescriptor"].retry, - default_timeout=self._method_configs["DeleteMetricDescriptor"].timeout, - client_info=self._client_info, - ) - - request = metric_service_pb2.DeleteMetricDescriptorRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_metric_descriptor"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_time_series( - self, - name, - filter_, - interval, - view, - aggregation=None, - order_by=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists time series that match a filter. This method does not require a Stackdriver account. - - Example: - >>> from google.cloud import monitoring_v3 - >>> from google.cloud.monitoring_v3 import enums - >>> - >>> client = monitoring_v3.MetricServiceClient() - >>> - >>> name = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `filter_`: - >>> filter_ = '' - >>> - >>> # TODO: Initialize `interval`: - >>> interval = {} - >>> - >>> # TODO: Initialize `view`: - >>> view = enums.ListTimeSeriesRequest.TimeSeriesView.FULL - >>> - >>> # Iterate over all results - >>> for element in client.list_time_series(name, filter_, interval, view): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_time_series(name, filter_, interval, view).pages: - ... for element in page: - ... # process element - ... pass - - Args: - name (str): The project on which to execute the request. The format is - "projects/{project\_id\_or\_number}". - filter_ (str): A `monitoring - filter `__ that - specifies which time series should be returned. The filter must specify - a single metric type, and can additionally specify metric labels and - other information. For example: - - :: - - metric.type = "compute.googleapis.com/instance/cpu/usage_time" AND - metric.labels.instance_name = "my-instance-name" - interval (Union[dict, ~google.cloud.monitoring_v3.types.TimeInterval]): The time interval for which results should be returned. Only time series - that contain data points in the specified interval are included - in the response. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.TimeInterval` - view (~google.cloud.monitoring_v3.types.TimeSeriesView): Specifies which information is returned about the time series. - aggregation (Union[dict, ~google.cloud.monitoring_v3.types.Aggregation]): Specifies the alignment of data points in individual time series as well - as how to combine the retrieved time series across specified labels. - - By default (if no ``aggregation`` is explicitly specified), the raw time - series data is returned. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.Aggregation` - order_by (str): Unsupported: must be left blank. 
The points in each time series are - currently returned in reverse time order (most recent to oldest). - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.monitoring_v3.types.TimeSeries` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "list_time_series" not in self._inner_api_calls: - self._inner_api_calls[ - "list_time_series" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_time_series, - default_retry=self._method_configs["ListTimeSeries"].retry, - default_timeout=self._method_configs["ListTimeSeries"].timeout, - client_info=self._client_info, - ) - - request = metric_service_pb2.ListTimeSeriesRequest( - name=name, - filter=filter_, - interval=interval, - view=view, - aggregation=aggregation, - order_by=order_by, - page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_time_series"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="time_series", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def create_time_series( - self, - name, - time_series, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates or adds data to one or more time series. - The response is empty if all time series in the request were written. - If any time series could not be written, a corresponding failure message is - included in the error response. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.MetricServiceClient() - >>> - >>> name = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `time_series`: - >>> time_series = [] - >>> - >>> client.create_time_series(name, time_series) - - Args: - name (str): The project on which to execute the request. The format is - ``"projects/{project_id_or_number}"``. 
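To make the list_time_series signature above concrete, a sketch that reads the last hour of instance CPU usage; the project ID is a placeholder and the filter string follows the docstring's own example.

import time

from google.cloud import monitoring_v3
from google.cloud.monitoring_v3 import enums

client = monitoring_v3.MetricServiceClient()
project_name = client.project_path("my-project")  # placeholder

# Query the last hour; TimeInterval start/end are protobuf Timestamps.
interval = monitoring_v3.types.TimeInterval()
now = time.time()
interval.end_time.seconds = int(now)
interval.start_time.seconds = int(now - 3600)

results = client.list_time_series(
    project_name,
    'metric.type = "compute.googleapis.com/instance/cpu/usage_time"',
    interval,
    enums.ListTimeSeriesRequest.TimeSeriesView.FULL,
)
for series in results:
    print(series.metric.type, len(series.points))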
- time_series (list[Union[dict, ~google.cloud.monitoring_v3.types.TimeSeries]]): The new data to be added to a list of time series. Adds at most one data - point to each of several time series. The new data point must be more - recent than any other point in its time series. Each ``TimeSeries`` - value must fully specify a unique time series by supplying all label - values for the metric and the monitored resource. - - The maximum number of ``TimeSeries`` objects per ``Create`` request is - 200. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.TimeSeries` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "create_time_series" not in self._inner_api_calls: - self._inner_api_calls[ - "create_time_series" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_time_series, - default_retry=self._method_configs["CreateTimeSeries"].retry, - default_timeout=self._method_configs["CreateTimeSeries"].timeout, - client_info=self._client_info, - ) - - request = metric_service_pb2.CreateTimeSeriesRequest( - name=name, time_series=time_series - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["create_time_series"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/monitoring/google/cloud/monitoring_v3/gapic/metric_service_client_config.py b/monitoring/google/cloud/monitoring_v3/gapic/metric_service_client_config.py deleted file mode 100644 index 165949239f6c..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/metric_service_client_config.py +++ /dev/null @@ -1,63 +0,0 @@ -config = { - "interfaces": { - "google.monitoring.v3.MetricService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "ListMonitoredResourceDescriptors": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetMonitoredResourceDescriptor": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListMetricDescriptors": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - 
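Complementing the read path, a sketch of the create_time_series call documented above that writes a single data point for a custom metric on a Compute Engine instance; the metric type and resource labels are placeholders.

import time

from google.cloud import monitoring_v3

client = monitoring_v3.MetricServiceClient()
project_name = client.project_path("my-project")  # placeholder

series = monitoring_v3.types.TimeSeries()
series.metric.type = "custom.googleapis.com/my_test_metric"  # placeholder
series.resource.type = "gce_instance"
series.resource.labels["instance_id"] = "1234567890123456789"  # placeholder
series.resource.labels["zone"] = "us-central1-f"  # placeholder

# Each series in the request carries exactly one new point.
point = series.points.add()
point.value.double_value = 3.14
now = time.time()
point.interval.end_time.seconds = int(now)
point.interval.end_time.nanos = int((now - int(now)) * 10 ** 9)

# At most 200 TimeSeries objects per request, per the docstring above.
client.create_time_series(project_name, [series])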
"retry_params_name": "default", - }, - "GetMetricDescriptor": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateMetricDescriptor": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DeleteMetricDescriptor": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListTimeSeries": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateTimeSeries": { - "timeout_millis": 12000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/monitoring/google/cloud/monitoring_v3/gapic/notification_channel_service_client.py b/monitoring/google/cloud/monitoring_v3/gapic/notification_channel_service_client.py deleted file mode 100644 index 0445837c01f7..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/notification_channel_service_client.py +++ /dev/null @@ -1,1176 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.monitoring.v3 NotificationChannelService API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.api import metric_pb2 as api_metric_pb2 -from google.api import monitored_resource_pb2 -from google.cloud.monitoring_v3.gapic import enums -from google.cloud.monitoring_v3.gapic import notification_channel_service_client_config -from google.cloud.monitoring_v3.gapic.transports import ( - notification_channel_service_grpc_transport, -) -from google.cloud.monitoring_v3.proto import alert_pb2 -from google.cloud.monitoring_v3.proto import alert_service_pb2 -from google.cloud.monitoring_v3.proto import alert_service_pb2_grpc -from google.cloud.monitoring_v3.proto import common_pb2 -from google.cloud.monitoring_v3.proto import group_pb2 -from google.cloud.monitoring_v3.proto import group_service_pb2 -from google.cloud.monitoring_v3.proto import group_service_pb2_grpc -from google.cloud.monitoring_v3.proto import metric_pb2 as proto_metric_pb2 -from google.cloud.monitoring_v3.proto import metric_service_pb2 -from google.cloud.monitoring_v3.proto import metric_service_pb2_grpc -from google.cloud.monitoring_v3.proto import notification_pb2 -from google.cloud.monitoring_v3.proto import notification_service_pb2 -from google.cloud.monitoring_v3.proto import notification_service_pb2_grpc -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - - 
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-monitoring" -).version - - -class NotificationChannelServiceClient(object): - """ - The Notification Channel API provides access to configuration that - controls how messages related to incidents are sent. - """ - - SERVICE_ADDRESS = "monitoring.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.monitoring.v3.NotificationChannelService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - NotificationChannelServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def notification_channel_path(cls, project, notification_channel): - """Return a fully-qualified notification_channel string.""" - return google.api_core.path_template.expand( - "projects/{project}/notificationChannels/{notification_channel}", - project=project, - notification_channel=notification_channel, - ) - - @classmethod - def notification_channel_descriptor_path(cls, project, channel_descriptor): - """Return a fully-qualified notification_channel_descriptor string.""" - return google.api_core.path_template.expand( - "projects/{project}/notificationChannelDescriptors/{channel_descriptor}", - project=project, - channel_descriptor=channel_descriptor, - ) - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.NotificationChannelServiceGrpcTransport, - Callable[[~.Credentials, type], ~.NotificationChannelServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = notification_channel_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=notification_channel_service_grpc_transport.NotificationChannelServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = notification_channel_service_grpc_transport.NotificationChannelServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def list_notification_channel_descriptors( - self, - name, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the descriptors for supported channel types. The use of descriptors - makes it possible for new channel types to be dynamically added. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.NotificationChannelServiceClient() - >>> - >>> name = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_notification_channel_descriptors(name): - ... # process element - ... 
pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_notification_channel_descriptors(name).pages: - ... for element in page: - ... # process element - ... pass - - Args: - name (str): The REST resource name of the parent from which to retrieve the - notification channel descriptors. The expected syntax is: - - :: - - projects/[PROJECT_ID] - - Note that this names the parent container in which to look for the - descriptors; to retrieve a single descriptor by name, use the - ``GetNotificationChannelDescriptor`` operation, instead. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.monitoring_v3.types.NotificationChannelDescriptor` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "list_notification_channel_descriptors" not in self._inner_api_calls: - self._inner_api_calls[ - "list_notification_channel_descriptors" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_notification_channel_descriptors, - default_retry=self._method_configs[ - "ListNotificationChannelDescriptors" - ].retry, - default_timeout=self._method_configs[ - "ListNotificationChannelDescriptors" - ].timeout, - client_info=self._client_info, - ) - - request = notification_service_pb2.ListNotificationChannelDescriptorsRequest( - name=name, page_size=page_size - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_notification_channel_descriptors"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="channel_descriptors", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_notification_channel_descriptor( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a single channel descriptor. 
The descriptor indicates which fields - are expected / permitted for a notification channel of the given type. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.NotificationChannelServiceClient() - >>> - >>> name = client.notification_channel_descriptor_path('[PROJECT]', '[CHANNEL_DESCRIPTOR]') - >>> - >>> response = client.get_notification_channel_descriptor(name) - - Args: - name (str): The channel type for which to execute the request. The format is - ``projects/[PROJECT_ID]/notificationChannelDescriptors/{channel_type}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.NotificationChannelDescriptor` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "get_notification_channel_descriptor" not in self._inner_api_calls: - self._inner_api_calls[ - "get_notification_channel_descriptor" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_notification_channel_descriptor, - default_retry=self._method_configs[ - "GetNotificationChannelDescriptor" - ].retry, - default_timeout=self._method_configs[ - "GetNotificationChannelDescriptor" - ].timeout, - client_info=self._client_info, - ) - - request = notification_service_pb2.GetNotificationChannelDescriptorRequest( - name=name - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_notification_channel_descriptor"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_notification_channels( - self, - name, - filter_=None, - order_by=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the notification channels that have been created for the project. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.NotificationChannelServiceClient() - >>> - >>> name = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_notification_channels(name): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_notification_channels(name).pages: - ... for element in page: - ... # process element - ... pass - - Args: - name (str): The project on which to execute the request. The format is - ``projects/[PROJECT_ID]``. 
That is, this names the container in which to - look for the notification channels; it does not name a specific channel. - To query a specific channel by REST resource name, use the - ``GetNotificationChannel`` operation. - filter_ (str): If provided, this field specifies the criteria that must be met by - notification channels to be included in the response. - - For more details, see `sorting and - filtering `__. - order_by (str): A comma-separated list of fields by which to sort the result. Supports - the same set of fields as in ``filter``. Entries can be prefixed with a - minus sign to sort in descending rather than ascending order. - - For more details, see `sorting and - filtering `__. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.monitoring_v3.types.NotificationChannel` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "list_notification_channels" not in self._inner_api_calls: - self._inner_api_calls[ - "list_notification_channels" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_notification_channels, - default_retry=self._method_configs["ListNotificationChannels"].retry, - default_timeout=self._method_configs[ - "ListNotificationChannels" - ].timeout, - client_info=self._client_info, - ) - - request = notification_service_pb2.ListNotificationChannelsRequest( - name=name, filter=filter_, order_by=order_by, page_size=page_size - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_notification_channels"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="notification_channels", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_notification_channel( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a single notification channel. 
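A sketch of the list_notification_channels call documented above, with an illustrative filter and sort order; the exact filterable fields are covered by the sorting-and-filtering documentation the docstring references, so both strings here are assumptions.

from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()
project_name = client.project_path("my-project")  # placeholder

# Filter and order_by values are illustrative examples only.
for channel in client.list_notification_channels(
    project_name, filter_='type = "email"', order_by="display_name"
):
    print(channel.name, channel.display_name)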
The channel includes the relevant - configuration details with which the channel was created. However, the - response may truncate or omit passwords, API keys, or other private key - matter and thus the response may not be 100% identical to the information - that was supplied in the call to the create method. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.NotificationChannelServiceClient() - >>> - >>> name = client.notification_channel_path('[PROJECT]', '[NOTIFICATION_CHANNEL]') - >>> - >>> response = client.get_notification_channel(name) - - Args: - name (str): The channel for which to execute the request. The format is - ``projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID]``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.NotificationChannel` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "get_notification_channel" not in self._inner_api_calls: - self._inner_api_calls[ - "get_notification_channel" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_notification_channel, - default_retry=self._method_configs["GetNotificationChannel"].retry, - default_timeout=self._method_configs["GetNotificationChannel"].timeout, - client_info=self._client_info, - ) - - request = notification_service_pb2.GetNotificationChannelRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_notification_channel"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_notification_channel( - self, - name, - notification_channel, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a new notification channel, representing a single notification - endpoint such as an email address, SMS number, or PagerDuty service. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.NotificationChannelServiceClient() - >>> - >>> name = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `notification_channel`: - >>> notification_channel = {} - >>> - >>> response = client.create_notification_channel(name, notification_channel) - - Args: - name (str): The project on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID] - - Note that this names the container into which the channel will be - written. This does not name the newly created channel. 
The resulting - channel's name will have a normalized version of this field as a prefix, - but will add ``/notificationChannels/[CHANNEL_ID]`` to identify the - channel. - notification_channel (Union[dict, ~google.cloud.monitoring_v3.types.NotificationChannel]): The definition of the ``NotificationChannel`` to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.NotificationChannel` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.NotificationChannel` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "create_notification_channel" not in self._inner_api_calls: - self._inner_api_calls[ - "create_notification_channel" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_notification_channel, - default_retry=self._method_configs["CreateNotificationChannel"].retry, - default_timeout=self._method_configs[ - "CreateNotificationChannel" - ].timeout, - client_info=self._client_info, - ) - - request = notification_service_pb2.CreateNotificationChannelRequest( - name=name, notification_channel=notification_channel - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_notification_channel"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_notification_channel( - self, - notification_channel, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a notification channel. Fields not specified in the field mask - remain unchanged. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.NotificationChannelServiceClient() - >>> - >>> # TODO: Initialize `notification_channel`: - >>> notification_channel = {} - >>> - >>> response = client.update_notification_channel(notification_channel) - - Args: - notification_channel (Union[dict, ~google.cloud.monitoring_v3.types.NotificationChannel]): A description of the changes to be applied to the specified notification - channel. The description must provide a definition for fields to be - updated; the names of these fields should also be included in the - ``update_mask``. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.NotificationChannel` - update_mask (Union[dict, ~google.cloud.monitoring_v3.types.FieldMask]): The fields to update. 
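To illustrate the create_notification_channel call documented above, a sketch that creates an email channel; the "email" channel type and its "email_address" label follow the common email channel descriptor, and the address and project are placeholders.

from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()
project_name = client.project_path("my-project")  # placeholder

# A dict of the same form as the NotificationChannel protobuf message.
channel = {
    "type": "email",  # assumes the standard "email" channel descriptor
    "display_name": "Primary on-call email",
    "labels": {"email_address": "oncall@example.com"},  # placeholder address
}

created = client.create_notification_channel(project_name, channel)
print("Created", created.name)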
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.NotificationChannel` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "update_notification_channel" not in self._inner_api_calls: - self._inner_api_calls[ - "update_notification_channel" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_notification_channel, - default_retry=self._method_configs["UpdateNotificationChannel"].retry, - default_timeout=self._method_configs[ - "UpdateNotificationChannel" - ].timeout, - client_info=self._client_info, - ) - - request = notification_service_pb2.UpdateNotificationChannelRequest( - notification_channel=notification_channel, update_mask=update_mask - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("notification_channel.name", notification_channel.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_notification_channel"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_notification_channel( - self, - name, - force=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a notification channel. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.NotificationChannelServiceClient() - >>> - >>> name = client.notification_channel_path('[PROJECT]', '[NOTIFICATION_CHANNEL]') - >>> - >>> client.delete_notification_channel(name) - - Args: - name (str): The channel for which to execute the request. The format is - ``projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID]``. - force (bool): If true, the notification channel will be deleted regardless of its - use in alert policies (the policies will be updated to remove the - channel). If false, channels that are still referenced by an existing - alerting policy will fail to be deleted in a delete operation. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. 
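Following the update_notification_channel docstring above, a sketch that renames an existing channel and restricts the update with a field mask; the project and channel IDs are placeholders.

from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()
name = client.notification_channel_path("my-project", "1234567890")  # placeholders

channel = client.get_notification_channel(name)
channel.display_name = "Secondary on-call email"

# Only the fields named in update_mask are changed; everything else is left as-is.
mask = monitoring_v3.types.FieldMask(paths=["display_name"])
updated = client.update_notification_channel(channel, update_mask=mask)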
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "delete_notification_channel" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_notification_channel" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_notification_channel, - default_retry=self._method_configs["DeleteNotificationChannel"].retry, - default_timeout=self._method_configs[ - "DeleteNotificationChannel" - ].timeout, - client_info=self._client_info, - ) - - request = notification_service_pb2.DeleteNotificationChannelRequest( - name=name, force=force - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_notification_channel"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def send_notification_channel_verification_code( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Causes a verification code to be delivered to the channel. The code can - then be supplied in ``VerifyNotificationChannel`` to verify the channel. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.NotificationChannelServiceClient() - >>> - >>> name = client.notification_channel_path('[PROJECT]', '[NOTIFICATION_CHANNEL]') - >>> - >>> client.send_notification_channel_verification_code(name) - - Args: - name (str): The notification channel to which to send a verification code. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. 
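# (The wrapped callable is cached in self._inner_api_calls, so the retry/timeout
#  defaults parsed from the client config are applied only once per method name;
#  a gRPC routing header derived from the resource name is then appended to the
#  call metadata before the request is sent.)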
- if "send_notification_channel_verification_code" not in self._inner_api_calls: - self._inner_api_calls[ - "send_notification_channel_verification_code" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.send_notification_channel_verification_code, - default_retry=self._method_configs[ - "SendNotificationChannelVerificationCode" - ].retry, - default_timeout=self._method_configs[ - "SendNotificationChannelVerificationCode" - ].timeout, - client_info=self._client_info, - ) - - request = notification_service_pb2.SendNotificationChannelVerificationCodeRequest( - name=name - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["send_notification_channel_verification_code"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_notification_channel_verification_code( - self, - name, - expire_time=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Requests a verification code for an already verified channel that can then - be used in a call to VerifyNotificationChannel() on a different channel - with an equivalent identity in the same or in a different project. This - makes it possible to copy a channel between projects without requiring - manual reverification of the channel. If the channel is not in the - verified state, this method will fail (in other words, this may only be - used if the SendNotificationChannelVerificationCode and - VerifyNotificationChannel paths have already been used to put the given - channel into the verified state). - - There is no guarantee that the verification codes returned by this method - will be of a similar structure or form as the ones that are delivered - to the channel via SendNotificationChannelVerificationCode; while - VerifyNotificationChannel() will recognize both the codes delivered via - SendNotificationChannelVerificationCode() and returned from - GetNotificationChannelVerificationCode(), it is typically the case that - the verification codes delivered via - SendNotificationChannelVerificationCode() will be shorter and also - have a shorter expiration (e.g. codes such as "G-123456") whereas - GetVerificationCode() will typically return a much longer, websafe base - 64 encoded string that has a longer expiration time. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.NotificationChannelServiceClient() - >>> - >>> name = client.notification_channel_path('[PROJECT]', '[NOTIFICATION_CHANNEL]') - >>> - >>> response = client.get_notification_channel_verification_code(name) - - Args: - name (str): The notification channel for which a verification code is to be generated - and retrieved. This must name a channel that is already verified; if - the specified channel is not verified, the request will fail. - expire_time (Union[dict, ~google.cloud.monitoring_v3.types.Timestamp]): The desired expiration time. If specified, the API will guarantee that - the returned code will not be valid after the specified timestamp; - however, the API cannot guarantee that the returned code will be - valid for at least as long as the requested time (the API puts an upper - bound on the amount of time for which a code may be valid). 
If omitted, - a default expiration will be used, which may be less than the max - permissible expiration (so specifying an expiration may extend the - code's lifetime over omitting an expiration, even though the API does - impose an upper limit on the maximum expiration that is permitted). - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.Timestamp` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.GetNotificationChannelVerificationCodeResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "get_notification_channel_verification_code" not in self._inner_api_calls: - self._inner_api_calls[ - "get_notification_channel_verification_code" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_notification_channel_verification_code, - default_retry=self._method_configs[ - "GetNotificationChannelVerificationCode" - ].retry, - default_timeout=self._method_configs[ - "GetNotificationChannelVerificationCode" - ].timeout, - client_info=self._client_info, - ) - - request = notification_service_pb2.GetNotificationChannelVerificationCodeRequest( - name=name, expire_time=expire_time - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_notification_channel_verification_code"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def verify_notification_channel( - self, - name, - code, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Verifies a ``NotificationChannel`` by proving receipt of the code - delivered to the channel as a result of calling - ``SendNotificationChannelVerificationCode``. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.NotificationChannelServiceClient() - >>> - >>> name = client.notification_channel_path('[PROJECT]', '[NOTIFICATION_CHANNEL]') - >>> - >>> # TODO: Initialize `code`: - >>> code = '' - >>> - >>> response = client.verify_notification_channel(name, code) - - Args: - name (str): The notification channel to verify. - code (str): The verification code that was delivered to the channel as a result of - invoking the ``SendNotificationChannelVerificationCode`` API method or - that was retrieved from a verified channel via - ``GetNotificationChannelVerificationCode``. 
For example, one might have - "G-123456" or "TKNZGhhd2EyN3I1MnRnMjRv" (in general, one is only - guaranteed that the code is valid UTF-8; one should not make any - assumptions regarding the structure or format of the code). - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.NotificationChannel` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "verify_notification_channel" not in self._inner_api_calls: - self._inner_api_calls[ - "verify_notification_channel" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.verify_notification_channel, - default_retry=self._method_configs["VerifyNotificationChannel"].retry, - default_timeout=self._method_configs[ - "VerifyNotificationChannel" - ].timeout, - client_info=self._client_info, - ) - - request = notification_service_pb2.VerifyNotificationChannelRequest( - name=name, code=code - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["verify_notification_channel"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/monitoring/google/cloud/monitoring_v3/gapic/notification_channel_service_client_config.py b/monitoring/google/cloud/monitoring_v3/gapic/notification_channel_service_client_config.py deleted file mode 100644 index 6d0e1e1d1103..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/notification_channel_service_client_config.py +++ /dev/null @@ -1,73 +0,0 @@ -config = { - "interfaces": { - "google.monitoring.v3.NotificationChannelService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "ListNotificationChannelDescriptors": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetNotificationChannelDescriptor": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListNotificationChannels": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetNotificationChannel": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - 
"CreateNotificationChannel": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateNotificationChannel": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DeleteNotificationChannel": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "SendNotificationChannelVerificationCode": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetNotificationChannelVerificationCode": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "VerifyNotificationChannel": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/monitoring/google/cloud/monitoring_v3/gapic/service_monitoring_service_client.py b/monitoring/google/cloud/monitoring_v3/gapic/service_monitoring_service_client.py deleted file mode 100644 index 0daa8970cf59..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/service_monitoring_service_client.py +++ /dev/null @@ -1,1127 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.monitoring.v3 ServiceMonitoringService API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.api import metric_pb2 as api_metric_pb2 -from google.api import monitored_resource_pb2 -from google.cloud.monitoring_v3.gapic import enums -from google.cloud.monitoring_v3.gapic import service_monitoring_service_client_config -from google.cloud.monitoring_v3.gapic.transports import ( - service_monitoring_service_grpc_transport, -) -from google.cloud.monitoring_v3.proto import alert_pb2 -from google.cloud.monitoring_v3.proto import alert_service_pb2 -from google.cloud.monitoring_v3.proto import alert_service_pb2_grpc -from google.cloud.monitoring_v3.proto import common_pb2 -from google.cloud.monitoring_v3.proto import group_pb2 -from google.cloud.monitoring_v3.proto import group_service_pb2 -from google.cloud.monitoring_v3.proto import group_service_pb2_grpc -from google.cloud.monitoring_v3.proto import metric_pb2 as proto_metric_pb2 -from google.cloud.monitoring_v3.proto import metric_service_pb2 -from google.cloud.monitoring_v3.proto import metric_service_pb2_grpc -from google.cloud.monitoring_v3.proto import notification_pb2 -from google.cloud.monitoring_v3.proto import notification_service_pb2 -from google.cloud.monitoring_v3.proto import notification_service_pb2_grpc -from google.cloud.monitoring_v3.proto import service_pb2 -from google.cloud.monitoring_v3.proto import service_service_pb2 -from google.cloud.monitoring_v3.proto import service_service_pb2_grpc -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-monitoring" -).version - - -class ServiceMonitoringServiceClient(object): - """ - The Stackdriver Monitoring Service-Oriented Monitoring API has endpoints - for managing and querying aspects of a workspace's services. These - include the ``Service``'s monitored resources, its Service-Level - Objectives, and a taxonomy of categorized Health Metrics. - """ - - SERVICE_ADDRESS = "monitoring.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.monitoring.v3.ServiceMonitoringService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ServiceMonitoringServiceClient: The constructed client. 
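A minimal usage sketch (the key-file path below is illustrative only):

    >>> from google.cloud import monitoring_v3
    >>>
    >>> client = monitoring_v3.ServiceMonitoringServiceClient.from_service_account_file(
    ...     'service-account.json')
    >>> # `from_service_account_json` is an alias of this constructor.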
- """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project - ) - - @classmethod - def service_path(cls, project, service): - """Return a fully-qualified service string.""" - return google.api_core.path_template.expand( - "projects/{project}/services/{service}", project=project, service=service - ) - - @classmethod - def service_level_objective_path(cls, project, service, service_level_objective): - """Return a fully-qualified service_level_objective string.""" - return google.api_core.path_template.expand( - "projects/{project}/services/{service}/serviceLevelObjectives/{service_level_objective}", - project=project, - service=service, - service_level_objective=service_level_objective, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.ServiceMonitoringServiceGrpcTransport, - Callable[[~.Credentials, type], ~.ServiceMonitoringServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. 
- if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = service_monitoring_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=service_monitoring_service_grpc_transport.ServiceMonitoringServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = service_monitoring_service_grpc_transport.ServiceMonitoringServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_service( - self, - parent, - service, - service_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Create a ``Service``. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.ServiceMonitoringServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `service`: - >>> service = {} - >>> - >>> response = client.create_service(parent, service) - - Args: - parent (str): Resource name of the parent workspace. Of the form - ``projects/{project_id}``. - service (Union[dict, ~google.cloud.monitoring_v3.types.Service]): The ``Service`` to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.Service` - service_id (str): Optional. The Service id to use for this Service. If omitted, an id will - be generated instead. Must match the pattern [a-z0-9-]+ - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.Service` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "create_service" not in self._inner_api_calls: - self._inner_api_calls[ - "create_service" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_service, - default_retry=self._method_configs["CreateService"].retry, - default_timeout=self._method_configs["CreateService"].timeout, - client_info=self._client_info, - ) - - request = service_service_pb2.CreateServiceRequest( - parent=parent, service=service, service_id=service_id - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_service"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_service( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Get the named ``Service``. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.ServiceMonitoringServiceClient() - >>> - >>> name = client.service_path('[PROJECT]', '[SERVICE]') - >>> - >>> response = client.get_service(name) - - Args: - name (str): Resource name of the ``Service``. Of the form - ``projects/{project_id}/services/{service_id}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.Service` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. 
- if "get_service" not in self._inner_api_calls: - self._inner_api_calls[ - "get_service" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_service, - default_retry=self._method_configs["GetService"].retry, - default_timeout=self._method_configs["GetService"].timeout, - client_info=self._client_info, - ) - - request = service_service_pb2.GetServiceRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_service"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_services( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - List ``Service``\ s for this workspace. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.ServiceMonitoringServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_services(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_services(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Resource name of the parent ``Workspace``. Of the form - ``projects/{project_id}``. - filter_ (str): A filter specifying what ``Service``\ s to return. The filter currently - supports the following fields: - - :: - - - `identifier_case` - - `app_engine.module_id` - - `cloud_endpoints.service` - - `cluster_istio.location` - - `cluster_istio.cluster_name` - - `cluster_istio.service_namespace` - - `cluster_istio.service_name` - - ``identifier_case`` refers to which option in the identifier oneof is - populated. For example, the filter ``identifier_case = "CUSTOM"`` would - match all services with a value for the ``custom`` field. Valid options - are "CUSTOM", "APP\_ENGINE", "CLOUD\_ENDPOINTS", and "CLUSTER\_ISTIO". - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.monitoring_v3.types.Service` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. 
- ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "list_services" not in self._inner_api_calls: - self._inner_api_calls[ - "list_services" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_services, - default_retry=self._method_configs["ListServices"].retry, - default_timeout=self._method_configs["ListServices"].timeout, - client_info=self._client_info, - ) - - request = service_service_pb2.ListServicesRequest( - parent=parent, filter=filter_, page_size=page_size - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_services"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="services", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def update_service( - self, - service, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Update this ``Service``. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.ServiceMonitoringServiceClient() - >>> - >>> # TODO: Initialize `service`: - >>> service = {} - >>> - >>> response = client.update_service(service) - - Args: - service (Union[dict, ~google.cloud.monitoring_v3.types.Service]): The ``Service`` to draw updates from. The given ``name`` specifies the - resource to update. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.Service` - update_mask (Union[dict, ~google.cloud.monitoring_v3.types.FieldMask]): A set of field paths defining which fields to use for the update. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.Service` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. 
- if "update_service" not in self._inner_api_calls: - self._inner_api_calls[ - "update_service" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_service, - default_retry=self._method_configs["UpdateService"].retry, - default_timeout=self._method_configs["UpdateService"].timeout, - client_info=self._client_info, - ) - - request = service_service_pb2.UpdateServiceRequest( - service=service, update_mask=update_mask - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("service.name", service.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_service"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_service( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Soft delete this ``Service``. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.ServiceMonitoringServiceClient() - >>> - >>> name = client.service_path('[PROJECT]', '[SERVICE]') - >>> - >>> client.delete_service(name) - - Args: - name (str): Resource name of the ``Service`` to delete. Of the form - ``projects/{project_id}/service/{service_id}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "delete_service" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_service" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_service, - default_retry=self._method_configs["DeleteService"].retry, - default_timeout=self._method_configs["DeleteService"].timeout, - client_info=self._client_info, - ) - - request = service_service_pb2.DeleteServiceRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_service"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_service_level_objective( - self, - parent, - service_level_objective, - service_level_objective_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Create a ``ServiceLevelObjective`` for the given ``Service``. 
- - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.ServiceMonitoringServiceClient() - >>> - >>> parent = client.service_path('[PROJECT]', '[SERVICE]') - >>> - >>> # TODO: Initialize `service_level_objective`: - >>> service_level_objective = {} - >>> - >>> response = client.create_service_level_objective(parent, service_level_objective) - - Args: - parent (str): Resource name of the parent ``Service``. Of the form - ``projects/{project_id}/services/{service_id}``. - service_level_objective (Union[dict, ~google.cloud.monitoring_v3.types.ServiceLevelObjective]): The ``ServiceLevelObjective`` to create. The provided ``name`` will be - respected if no ``ServiceLevelObjective`` exists with this name. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.ServiceLevelObjective` - service_level_objective_id (str): Optional. The ServiceLevelObjective id to use for this - ServiceLevelObjective. If omitted, an id will be generated instead. Must - match the pattern [a-z0-9-]+ - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.ServiceLevelObjective` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "create_service_level_objective" not in self._inner_api_calls: - self._inner_api_calls[ - "create_service_level_objective" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_service_level_objective, - default_retry=self._method_configs["CreateServiceLevelObjective"].retry, - default_timeout=self._method_configs[ - "CreateServiceLevelObjective" - ].timeout, - client_info=self._client_info, - ) - - request = service_service_pb2.CreateServiceLevelObjectiveRequest( - parent=parent, - service_level_objective=service_level_objective, - service_level_objective_id=service_level_objective_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_service_level_objective"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_service_level_objective( - self, - name, - view=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Get a ``ServiceLevelObjective`` by name. 
- - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.ServiceMonitoringServiceClient() - >>> - >>> name = client.service_level_objective_path('[PROJECT]', '[SERVICE]', '[SERVICE_LEVEL_OBJECTIVE]') - >>> - >>> response = client.get_service_level_objective(name) - - Args: - name (str): Resource name of the ``ServiceLevelObjective`` to get. Of the form - ``projects/{project_id}/services/{service_id}/serviceLevelObjectives/{slo_name}``. - view (~google.cloud.monitoring_v3.types.View): View of the ``ServiceLevelObjective`` to return. If ``DEFAULT``, return - the ``ServiceLevelObjective`` as originally defined. If ``EXPLICIT`` and - the ``ServiceLevelObjective`` is defined in terms of a ``BasicSli``, - replace the ``BasicSli`` with a ``RequestBasedSli`` spelling out how the - SLI is computed. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.ServiceLevelObjective` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "get_service_level_objective" not in self._inner_api_calls: - self._inner_api_calls[ - "get_service_level_objective" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_service_level_objective, - default_retry=self._method_configs["GetServiceLevelObjective"].retry, - default_timeout=self._method_configs[ - "GetServiceLevelObjective" - ].timeout, - client_info=self._client_info, - ) - - request = service_service_pb2.GetServiceLevelObjectiveRequest( - name=name, view=view - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_service_level_objective"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_service_level_objectives( - self, - parent, - filter_=None, - page_size=None, - view=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - List the ``ServiceLevelObjective``\ s for the given ``Service``. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.ServiceMonitoringServiceClient() - >>> - >>> parent = client.service_path('[PROJECT]', '[SERVICE]') - >>> - >>> # Iterate over all results - >>> for element in client.list_service_level_objectives(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_service_level_objectives(parent).pages: - ... for element in page: - ... 
# process element - ... pass - - Args: - parent (str): Resource name of the parent ``Service``. Of the form - ``projects/{project_id}/services/{service_id}``. - filter_ (str): A filter specifying what ``ServiceLevelObjective``\ s to return. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - view (~google.cloud.monitoring_v3.types.View): View of the ``ServiceLevelObjective``\ s to return. If ``DEFAULT``, - return each ``ServiceLevelObjective`` as originally defined. If - ``EXPLICIT`` and the ``ServiceLevelObjective`` is defined in terms of a - ``BasicSli``, replace the ``BasicSli`` with a ``RequestBasedSli`` - spelling out how the SLI is computed. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.monitoring_v3.types.ServiceLevelObjective` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "list_service_level_objectives" not in self._inner_api_calls: - self._inner_api_calls[ - "list_service_level_objectives" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_service_level_objectives, - default_retry=self._method_configs["ListServiceLevelObjectives"].retry, - default_timeout=self._method_configs[ - "ListServiceLevelObjectives" - ].timeout, - client_info=self._client_info, - ) - - request = service_service_pb2.ListServiceLevelObjectivesRequest( - parent=parent, filter=filter_, page_size=page_size, view=view - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_service_level_objectives"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="service_level_objectives", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def update_service_level_objective( - self, - service_level_objective, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Update the given ``ServiceLevelObjective``. 
- - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.ServiceMonitoringServiceClient() - >>> - >>> # TODO: Initialize `service_level_objective`: - >>> service_level_objective = {} - >>> - >>> response = client.update_service_level_objective(service_level_objective) - - Args: - service_level_objective (Union[dict, ~google.cloud.monitoring_v3.types.ServiceLevelObjective]): The ``ServiceLevelObjective`` to draw updates from. The given ``name`` - specifies the resource to update. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.ServiceLevelObjective` - update_mask (Union[dict, ~google.cloud.monitoring_v3.types.FieldMask]): A set of field paths defining which fields to use for the update. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.ServiceLevelObjective` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "update_service_level_objective" not in self._inner_api_calls: - self._inner_api_calls[ - "update_service_level_objective" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_service_level_objective, - default_retry=self._method_configs["UpdateServiceLevelObjective"].retry, - default_timeout=self._method_configs[ - "UpdateServiceLevelObjective" - ].timeout, - client_info=self._client_info, - ) - - request = service_service_pb2.UpdateServiceLevelObjectiveRequest( - service_level_objective=service_level_objective, update_mask=update_mask - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [ - ("service_level_objective.name", service_level_objective.name) - ] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_service_level_objective"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_service_level_objective( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Delete the given ``ServiceLevelObjective``. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.ServiceMonitoringServiceClient() - >>> - >>> name = client.service_level_objective_path('[PROJECT]', '[SERVICE]', '[SERVICE_LEVEL_OBJECTIVE]') - >>> - >>> client.delete_service_level_objective(name) - - Args: - name (str): Resource name of the ``ServiceLevelObjective`` to delete. 
Of the form - ``projects/{project_id}/services/{service_id}/serviceLevelObjectives/{slo_name}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "delete_service_level_objective" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_service_level_objective" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_service_level_objective, - default_retry=self._method_configs["DeleteServiceLevelObjective"].retry, - default_timeout=self._method_configs[ - "DeleteServiceLevelObjective" - ].timeout, - client_info=self._client_info, - ) - - request = service_service_pb2.DeleteServiceLevelObjectiveRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_service_level_objective"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/monitoring/google/cloud/monitoring_v3/gapic/service_monitoring_service_client_config.py b/monitoring/google/cloud/monitoring_v3/gapic/service_monitoring_service_client_config.py deleted file mode 100644 index 575c0f88aef6..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/service_monitoring_service_client_config.py +++ /dev/null @@ -1,73 +0,0 @@ -config = { - "interfaces": { - "google.monitoring.v3.ServiceMonitoringService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "CreateService": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetService": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListServices": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "UpdateService": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DeleteService": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateServiceLevelObjective": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetServiceLevelObjective": { - "timeout_millis": 
60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListServiceLevelObjectives": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "UpdateServiceLevelObjective": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DeleteServiceLevelObjective": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/monitoring/google/cloud/monitoring_v3/gapic/transports/__init__.py b/monitoring/google/cloud/monitoring_v3/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/monitoring/google/cloud/monitoring_v3/gapic/transports/alert_policy_service_grpc_transport.py b/monitoring/google/cloud/monitoring_v3/gapic/transports/alert_policy_service_grpc_transport.py deleted file mode 100644 index e911a8493dee..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/transports/alert_policy_service_grpc_transport.py +++ /dev/null @@ -1,183 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.monitoring_v3.proto import alert_service_pb2_grpc - - -class AlertPolicyServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.monitoring.v3 AlertPolicyService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/monitoring", - "https://www.googleapis.com/auth/monitoring.read", - "https://www.googleapis.com/auth/monitoring.write", - ) - - def __init__( - self, channel=None, credentials=None, address="monitoring.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. 
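# (The -1 values passed for grpc.max_send_message_length and
#  grpc.max_receive_message_length below remove gRPC's default message-size limits.)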
- if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "alert_policy_service_stub": alert_service_pb2_grpc.AlertPolicyServiceStub( - channel - ) - } - - @classmethod - def create_channel( - cls, address="monitoring.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def list_alert_policies(self): - """Return the gRPC stub for :meth:`AlertPolicyServiceClient.list_alert_policies`. - - Lists the existing alerting policies for the project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["alert_policy_service_stub"].ListAlertPolicies - - @property - def get_alert_policy(self): - """Return the gRPC stub for :meth:`AlertPolicyServiceClient.get_alert_policy`. - - Gets a single alerting policy. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["alert_policy_service_stub"].GetAlertPolicy - - @property - def create_alert_policy(self): - """Return the gRPC stub for :meth:`AlertPolicyServiceClient.create_alert_policy`. - - Creates a new alerting policy. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["alert_policy_service_stub"].CreateAlertPolicy - - @property - def delete_alert_policy(self): - """Return the gRPC stub for :meth:`AlertPolicyServiceClient.delete_alert_policy`. - - Deletes an alerting policy. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["alert_policy_service_stub"].DeleteAlertPolicy - - @property - def update_alert_policy(self): - """Return the gRPC stub for :meth:`AlertPolicyServiceClient.update_alert_policy`. - - Updates an alerting policy. You can either replace the entire policy - with a new one or replace only certain fields in the current alerting - policy by specifying the fields to be updated via ``updateMask``. - Returns the updated alerting policy. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["alert_policy_service_stub"].UpdateAlertPolicy diff --git a/monitoring/google/cloud/monitoring_v3/gapic/transports/group_service_grpc_transport.py b/monitoring/google/cloud/monitoring_v3/gapic/transports/group_service_grpc_transport.py deleted file mode 100644 index f1916baf37c0..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/transports/group_service_grpc_transport.py +++ /dev/null @@ -1,192 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.monitoring_v3.proto import group_service_pb2_grpc - - -class GroupServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.monitoring.v3 GroupService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/monitoring", - "https://www.googleapis.com/auth/monitoring.read", - "https://www.googleapis.com/auth/monitoring.write", - ) - - def __init__( - self, channel=None, credentials=None, address="monitoring.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "group_service_stub": group_service_pb2_grpc.GroupServiceStub(channel) - } - - @classmethod - def create_channel( - cls, address="monitoring.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def list_groups(self): - """Return the gRPC stub for :meth:`GroupServiceClient.list_groups`. - - Lists the existing groups. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["group_service_stub"].ListGroups - - @property - def get_group(self): - """Return the gRPC stub for :meth:`GroupServiceClient.get_group`. - - Gets a single group. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["group_service_stub"].GetGroup - - @property - def create_group(self): - """Return the gRPC stub for :meth:`GroupServiceClient.create_group`. - - Creates a new group. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["group_service_stub"].CreateGroup - - @property - def update_group(self): - """Return the gRPC stub for :meth:`GroupServiceClient.update_group`. - - Updates an existing group. You can change any group attributes except - ``name``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["group_service_stub"].UpdateGroup - - @property - def delete_group(self): - """Return the gRPC stub for :meth:`GroupServiceClient.delete_group`. - - Deletes an existing group. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["group_service_stub"].DeleteGroup - - @property - def list_group_members(self): - """Return the gRPC stub for :meth:`GroupServiceClient.list_group_members`. - - Lists the monitored resources that are members of a group. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["group_service_stub"].ListGroupMembers diff --git a/monitoring/google/cloud/monitoring_v3/gapic/transports/metric_service_grpc_transport.py b/monitoring/google/cloud/monitoring_v3/gapic/transports/metric_service_grpc_transport.py deleted file mode 100644 index 2359c23b0190..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/transports/metric_service_grpc_transport.py +++ /dev/null @@ -1,223 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.monitoring_v3.proto import metric_service_pb2_grpc - - -class MetricServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.monitoring.v3 MetricService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/monitoring", - "https://www.googleapis.com/auth/monitoring.read", - "https://www.googleapis.com/auth/monitoring.write", - ) - - def __init__( - self, channel=None, credentials=None, address="monitoring.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "metric_service_stub": metric_service_pb2_grpc.MetricServiceStub(channel) - } - - @classmethod - def create_channel( - cls, address="monitoring.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. 
- """ - return self._channel - - @property - def list_monitored_resource_descriptors(self): - """Return the gRPC stub for :meth:`MetricServiceClient.list_monitored_resource_descriptors`. - - Lists monitored resource descriptors that match a filter. This method does not require a Stackdriver account. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metric_service_stub"].ListMonitoredResourceDescriptors - - @property - def get_monitored_resource_descriptor(self): - """Return the gRPC stub for :meth:`MetricServiceClient.get_monitored_resource_descriptor`. - - Gets a single monitored resource descriptor. This method does not require a Stackdriver account. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metric_service_stub"].GetMonitoredResourceDescriptor - - @property - def list_metric_descriptors(self): - """Return the gRPC stub for :meth:`MetricServiceClient.list_metric_descriptors`. - - Lists metric descriptors that match a filter. This method does not require a Stackdriver account. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metric_service_stub"].ListMetricDescriptors - - @property - def get_metric_descriptor(self): - """Return the gRPC stub for :meth:`MetricServiceClient.get_metric_descriptor`. - - Gets a single metric descriptor. This method does not require a Stackdriver account. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metric_service_stub"].GetMetricDescriptor - - @property - def create_metric_descriptor(self): - """Return the gRPC stub for :meth:`MetricServiceClient.create_metric_descriptor`. - - Creates a new metric descriptor. User-created metric descriptors define - `custom metrics `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metric_service_stub"].CreateMetricDescriptor - - @property - def delete_metric_descriptor(self): - """Return the gRPC stub for :meth:`MetricServiceClient.delete_metric_descriptor`. - - Deletes a metric descriptor. Only user-created `custom - metrics `__ can be - deleted. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metric_service_stub"].DeleteMetricDescriptor - - @property - def list_time_series(self): - """Return the gRPC stub for :meth:`MetricServiceClient.list_time_series`. - - Lists time series that match a filter. This method does not require a Stackdriver account. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metric_service_stub"].ListTimeSeries - - @property - def create_time_series(self): - """Return the gRPC stub for :meth:`MetricServiceClient.create_time_series`. - - Creates or adds data to one or more time series. - The response is empty if all time series in the request were written. 
- If any time series could not be written, a corresponding failure message is - included in the error response. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metric_service_stub"].CreateTimeSeries diff --git a/monitoring/google/cloud/monitoring_v3/gapic/transports/notification_channel_service_grpc_transport.py b/monitoring/google/cloud/monitoring_v3/gapic/transports/notification_channel_service_grpc_transport.py deleted file mode 100644 index ce54f4f7610c..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/transports/notification_channel_service_grpc_transport.py +++ /dev/null @@ -1,292 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.monitoring_v3.proto import notification_service_pb2_grpc - - -class NotificationChannelServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.monitoring.v3 NotificationChannelService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/monitoring", - "https://www.googleapis.com/auth/monitoring.read", - "https://www.googleapis.com/auth/monitoring.write", - ) - - def __init__( - self, channel=None, credentials=None, address="monitoring.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. 
- self._stubs = { - "notification_channel_service_stub": notification_service_pb2_grpc.NotificationChannelServiceStub( - channel - ) - } - - @classmethod - def create_channel( - cls, address="monitoring.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def list_notification_channel_descriptors(self): - """Return the gRPC stub for :meth:`NotificationChannelServiceClient.list_notification_channel_descriptors`. - - Lists the descriptors for supported channel types. The use of descriptors - makes it possible for new channel types to be dynamically added. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs[ - "notification_channel_service_stub" - ].ListNotificationChannelDescriptors - - @property - def get_notification_channel_descriptor(self): - """Return the gRPC stub for :meth:`NotificationChannelServiceClient.get_notification_channel_descriptor`. - - Gets a single channel descriptor. The descriptor indicates which fields - are expected / permitted for a notification channel of the given type. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs[ - "notification_channel_service_stub" - ].GetNotificationChannelDescriptor - - @property - def list_notification_channels(self): - """Return the gRPC stub for :meth:`NotificationChannelServiceClient.list_notification_channels`. - - Lists the notification channels that have been created for the project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["notification_channel_service_stub"].ListNotificationChannels - - @property - def get_notification_channel(self): - """Return the gRPC stub for :meth:`NotificationChannelServiceClient.get_notification_channel`. - - Gets a single notification channel. The channel includes the relevant - configuration details with which the channel was created. However, the - response may truncate or omit passwords, API keys, or other private key - matter and thus the response may not be 100% identical to the information - that was supplied in the call to the create method. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["notification_channel_service_stub"].GetNotificationChannel - - @property - def create_notification_channel(self): - """Return the gRPC stub for :meth:`NotificationChannelServiceClient.create_notification_channel`. 
- - Creates a new notification channel, representing a single notification - endpoint such as an email address, SMS number, or PagerDuty service. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs[ - "notification_channel_service_stub" - ].CreateNotificationChannel - - @property - def update_notification_channel(self): - """Return the gRPC stub for :meth:`NotificationChannelServiceClient.update_notification_channel`. - - Updates a notification channel. Fields not specified in the field mask - remain unchanged. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs[ - "notification_channel_service_stub" - ].UpdateNotificationChannel - - @property - def delete_notification_channel(self): - """Return the gRPC stub for :meth:`NotificationChannelServiceClient.delete_notification_channel`. - - Deletes a notification channel. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs[ - "notification_channel_service_stub" - ].DeleteNotificationChannel - - @property - def send_notification_channel_verification_code(self): - """Return the gRPC stub for :meth:`NotificationChannelServiceClient.send_notification_channel_verification_code`. - - Causes a verification code to be delivered to the channel. The code can - then be supplied in ``VerifyNotificationChannel`` to verify the channel. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs[ - "notification_channel_service_stub" - ].SendNotificationChannelVerificationCode - - @property - def get_notification_channel_verification_code(self): - """Return the gRPC stub for :meth:`NotificationChannelServiceClient.get_notification_channel_verification_code`. - - Requests a verification code for an already verified channel that can then - be used in a call to VerifyNotificationChannel() on a different channel - with an equivalent identity in the same or in a different project. This - makes it possible to copy a channel between projects without requiring - manual reverification of the channel. If the channel is not in the - verified state, this method will fail (in other words, this may only be - used if the SendNotificationChannelVerificationCode and - VerifyNotificationChannel paths have already been used to put the given - channel into the verified state). - - There is no guarantee that the verification codes returned by this method - will be of a similar structure or form as the ones that are delivered - to the channel via SendNotificationChannelVerificationCode; while - VerifyNotificationChannel() will recognize both the codes delivered via - SendNotificationChannelVerificationCode() and returned from - GetNotificationChannelVerificationCode(), it is typically the case that - the verification codes delivered via - SendNotificationChannelVerificationCode() will be shorter and also - have a shorter expiration (e.g. codes such as "G-123456") whereas - GetVerificationCode() will typically return a much longer, websafe base - 64 encoded string that has a longer expiration time. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs[ - "notification_channel_service_stub" - ].GetNotificationChannelVerificationCode - - @property - def verify_notification_channel(self): - """Return the gRPC stub for :meth:`NotificationChannelServiceClient.verify_notification_channel`. - - Verifies a ``NotificationChannel`` by proving receipt of the code - delivered to the channel as a result of calling - ``SendNotificationChannelVerificationCode``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs[ - "notification_channel_service_stub" - ].VerifyNotificationChannel diff --git a/monitoring/google/cloud/monitoring_v3/gapic/transports/service_monitoring_service_grpc_transport.py b/monitoring/google/cloud/monitoring_v3/gapic/transports/service_monitoring_service_grpc_transport.py deleted file mode 100644 index f28253b1b489..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/transports/service_monitoring_service_grpc_transport.py +++ /dev/null @@ -1,251 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.monitoring_v3.proto import service_service_pb2_grpc - - -class ServiceMonitoringServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.monitoring.v3 ServiceMonitoringService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/monitoring", - "https://www.googleapis.com/auth/monitoring.read", - "https://www.googleapis.com/auth/monitoring.write", - ) - - def __init__( - self, channel=None, credentials=None, address="monitoring.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. 
- if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "service_monitoring_service_stub": service_service_pb2_grpc.ServiceMonitoringServiceStub( - channel - ) - } - - @classmethod - def create_channel( - cls, address="monitoring.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_service(self): - """Return the gRPC stub for :meth:`ServiceMonitoringServiceClient.create_service`. - - Create a ``Service``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["service_monitoring_service_stub"].CreateService - - @property - def get_service(self): - """Return the gRPC stub for :meth:`ServiceMonitoringServiceClient.get_service`. - - Get the named ``Service``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["service_monitoring_service_stub"].GetService - - @property - def list_services(self): - """Return the gRPC stub for :meth:`ServiceMonitoringServiceClient.list_services`. - - List ``Service``\ s for this workspace. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["service_monitoring_service_stub"].ListServices - - @property - def update_service(self): - """Return the gRPC stub for :meth:`ServiceMonitoringServiceClient.update_service`. - - Update this ``Service``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["service_monitoring_service_stub"].UpdateService - - @property - def delete_service(self): - """Return the gRPC stub for :meth:`ServiceMonitoringServiceClient.delete_service`. - - Soft delete this ``Service``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["service_monitoring_service_stub"].DeleteService - - @property - def create_service_level_objective(self): - """Return the gRPC stub for :meth:`ServiceMonitoringServiceClient.create_service_level_objective`. - - Create a ``ServiceLevelObjective`` for the given ``Service``. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs[ - "service_monitoring_service_stub" - ].CreateServiceLevelObjective - - @property - def get_service_level_objective(self): - """Return the gRPC stub for :meth:`ServiceMonitoringServiceClient.get_service_level_objective`. - - Get a ``ServiceLevelObjective`` by name. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["service_monitoring_service_stub"].GetServiceLevelObjective - - @property - def list_service_level_objectives(self): - """Return the gRPC stub for :meth:`ServiceMonitoringServiceClient.list_service_level_objectives`. - - List the ``ServiceLevelObjective``\ s for the given ``Service``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["service_monitoring_service_stub"].ListServiceLevelObjectives - - @property - def update_service_level_objective(self): - """Return the gRPC stub for :meth:`ServiceMonitoringServiceClient.update_service_level_objective`. - - Update the given ``ServiceLevelObjective``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs[ - "service_monitoring_service_stub" - ].UpdateServiceLevelObjective - - @property - def delete_service_level_objective(self): - """Return the gRPC stub for :meth:`ServiceMonitoringServiceClient.delete_service_level_objective`. - - Delete the given ``ServiceLevelObjective``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs[ - "service_monitoring_service_stub" - ].DeleteServiceLevelObjective diff --git a/monitoring/google/cloud/monitoring_v3/gapic/transports/uptime_check_service_grpc_transport.py b/monitoring/google/cloud/monitoring_v3/gapic/transports/uptime_check_service_grpc_transport.py deleted file mode 100644 index 80aca866c496..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/transports/uptime_check_service_grpc_transport.py +++ /dev/null @@ -1,199 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.monitoring_v3.proto import uptime_service_pb2_grpc - - -class UptimeCheckServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.monitoring.v3 UptimeCheckService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. 
- _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/monitoring", - "https://www.googleapis.com/auth/monitoring.read", - "https://www.googleapis.com/auth/monitoring.write", - ) - - def __init__( - self, channel=None, credentials=None, address="monitoring.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "uptime_check_service_stub": uptime_service_pb2_grpc.UptimeCheckServiceStub( - channel - ) - } - - @classmethod - def create_channel( - cls, address="monitoring.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def list_uptime_check_configs(self): - """Return the gRPC stub for :meth:`UptimeCheckServiceClient.list_uptime_check_configs`. - - Lists the existing valid Uptime check configurations for the project - (leaving out any invalid configurations). - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["uptime_check_service_stub"].ListUptimeCheckConfigs - - @property - def get_uptime_check_config(self): - """Return the gRPC stub for :meth:`UptimeCheckServiceClient.get_uptime_check_config`. - - Gets a single Uptime check configuration. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["uptime_check_service_stub"].GetUptimeCheckConfig - - @property - def create_uptime_check_config(self): - """Return the gRPC stub for :meth:`UptimeCheckServiceClient.create_uptime_check_config`. - - Creates a new Uptime check configuration. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["uptime_check_service_stub"].CreateUptimeCheckConfig - - @property - def update_uptime_check_config(self): - """Return the gRPC stub for :meth:`UptimeCheckServiceClient.update_uptime_check_config`. - - Updates an Uptime check configuration. You can either replace the entire - configuration with a new one or replace only certain fields in the - current configuration by specifying the fields to be updated via - ``updateMask``. Returns the updated configuration. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["uptime_check_service_stub"].UpdateUptimeCheckConfig - - @property - def delete_uptime_check_config(self): - """Return the gRPC stub for :meth:`UptimeCheckServiceClient.delete_uptime_check_config`. - - Deletes an Uptime check configuration. Note that this method will fail - if the Uptime check configuration is referenced by an alert policy or - other dependent configs that would be rendered invalid by the deletion. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["uptime_check_service_stub"].DeleteUptimeCheckConfig - - @property - def list_uptime_check_ips(self): - """Return the gRPC stub for :meth:`UptimeCheckServiceClient.list_uptime_check_ips`. - - Returns the list of IP addresses that checkers run from - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["uptime_check_service_stub"].ListUptimeCheckIps diff --git a/monitoring/google/cloud/monitoring_v3/gapic/uptime_check_service_client.py b/monitoring/google/cloud/monitoring_v3/gapic/uptime_check_service_client.py deleted file mode 100644 index 45f327bb11e1..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/uptime_check_service_client.py +++ /dev/null @@ -1,758 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.monitoring.v3 UptimeCheckService API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.api import metric_pb2 as api_metric_pb2 -from google.api import monitored_resource_pb2 -from google.cloud.monitoring_v3.gapic import enums -from google.cloud.monitoring_v3.gapic import uptime_check_service_client_config -from google.cloud.monitoring_v3.gapic.transports import ( - uptime_check_service_grpc_transport, -) -from google.cloud.monitoring_v3.proto import alert_pb2 -from google.cloud.monitoring_v3.proto import alert_service_pb2 -from google.cloud.monitoring_v3.proto import alert_service_pb2_grpc -from google.cloud.monitoring_v3.proto import common_pb2 -from google.cloud.monitoring_v3.proto import group_pb2 -from google.cloud.monitoring_v3.proto import group_service_pb2 -from google.cloud.monitoring_v3.proto import group_service_pb2_grpc -from google.cloud.monitoring_v3.proto import metric_pb2 as proto_metric_pb2 -from google.cloud.monitoring_v3.proto import metric_service_pb2 -from google.cloud.monitoring_v3.proto import metric_service_pb2_grpc -from google.cloud.monitoring_v3.proto import notification_pb2 -from google.cloud.monitoring_v3.proto import notification_service_pb2 -from google.cloud.monitoring_v3.proto import notification_service_pb2_grpc -from google.cloud.monitoring_v3.proto import service_pb2 -from google.cloud.monitoring_v3.proto import service_service_pb2 -from google.cloud.monitoring_v3.proto import service_service_pb2_grpc -from google.cloud.monitoring_v3.proto import uptime_pb2 -from google.cloud.monitoring_v3.proto import uptime_service_pb2 -from google.cloud.monitoring_v3.proto import uptime_service_pb2_grpc -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-monitoring" -).version - - -class UptimeCheckServiceClient(object): - """ - The UptimeCheckService API is used to manage (list, create, delete, - edit) Uptime check configurations in the Stackdriver Monitoring product. - An Uptime check is a piece of configuration that determines which - resources and services to monitor for availability. These configurations - can also be configured interactively by navigating to the [Cloud - Console] (http://console.cloud.google.com), selecting the appropriate - project, clicking on "Monitoring" on the left-hand side to navigate to - Stackdriver, and then clicking on "Uptime". - """ - - SERVICE_ADDRESS = "monitoring.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.monitoring.v3.UptimeCheckService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. 
- kwargs: Additional arguments to pass to the constructor. - - Returns: - UptimeCheckServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project - ) - - @classmethod - def uptime_check_config_path(cls, project, uptime_check_config): - """Return a fully-qualified uptime_check_config string.""" - return google.api_core.path_template.expand( - "projects/{project}/uptimeCheckConfigs/{uptime_check_config}", - project=project, - uptime_check_config=uptime_check_config, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.UptimeCheckServiceGrpcTransport, - Callable[[~.Credentials, type], ~.UptimeCheckServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = uptime_check_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. 
- if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=uptime_check_service_grpc_transport.UptimeCheckServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = uptime_check_service_grpc_transport.UptimeCheckServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def list_uptime_check_configs( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the existing valid Uptime check configurations for the project - (leaving out any invalid configurations). - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.UptimeCheckServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_uptime_check_configs(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_uptime_check_configs(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): The project whose Uptime check configurations are listed. The format is - ``projects/[PROJECT_ID]``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.monitoring_v3.types.UptimeCheckConfig` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "list_uptime_check_configs" not in self._inner_api_calls: - self._inner_api_calls[ - "list_uptime_check_configs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_uptime_check_configs, - default_retry=self._method_configs["ListUptimeCheckConfigs"].retry, - default_timeout=self._method_configs["ListUptimeCheckConfigs"].timeout, - client_info=self._client_info, - ) - - request = uptime_service_pb2.ListUptimeCheckConfigsRequest( - parent=parent, page_size=page_size - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_uptime_check_configs"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="uptime_check_configs", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_uptime_check_config( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a single Uptime check configuration. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.UptimeCheckServiceClient() - >>> - >>> name = client.uptime_check_config_path('[PROJECT]', '[UPTIME_CHECK_CONFIG]') - >>> - >>> response = client.get_uptime_check_config(name) - - Args: - name (str): The Uptime check configuration to retrieve. The format is - ``projects/[PROJECT_ID]/uptimeCheckConfigs/[UPTIME_CHECK_ID]``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.UptimeCheckConfig` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. 
- if "get_uptime_check_config" not in self._inner_api_calls: - self._inner_api_calls[ - "get_uptime_check_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_uptime_check_config, - default_retry=self._method_configs["GetUptimeCheckConfig"].retry, - default_timeout=self._method_configs["GetUptimeCheckConfig"].timeout, - client_info=self._client_info, - ) - - request = uptime_service_pb2.GetUptimeCheckConfigRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_uptime_check_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_uptime_check_config( - self, - parent, - uptime_check_config, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a new Uptime check configuration. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.UptimeCheckServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `uptime_check_config`: - >>> uptime_check_config = {} - >>> - >>> response = client.create_uptime_check_config(parent, uptime_check_config) - - Args: - parent (str): The project in which to create the Uptime check. The format is - ``projects/[PROJECT_ID]``. - uptime_check_config (Union[dict, ~google.cloud.monitoring_v3.types.UptimeCheckConfig]): The new Uptime check configuration. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.UptimeCheckConfig` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_v3.types.UptimeCheckConfig` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. 
- if "create_uptime_check_config" not in self._inner_api_calls: - self._inner_api_calls[ - "create_uptime_check_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_uptime_check_config, - default_retry=self._method_configs["CreateUptimeCheckConfig"].retry, - default_timeout=self._method_configs["CreateUptimeCheckConfig"].timeout, - client_info=self._client_info, - ) - - request = uptime_service_pb2.CreateUptimeCheckConfigRequest( - parent=parent, uptime_check_config=uptime_check_config - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_uptime_check_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_uptime_check_config( - self, - uptime_check_config, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates an Uptime check configuration. You can either replace the entire - configuration with a new one or replace only certain fields in the - current configuration by specifying the fields to be updated via - ``updateMask``. Returns the updated configuration. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.UptimeCheckServiceClient() - >>> - >>> # TODO: Initialize `uptime_check_config`: - >>> uptime_check_config = {} - >>> - >>> response = client.update_uptime_check_config(uptime_check_config) - - Args: - uptime_check_config (Union[dict, ~google.cloud.monitoring_v3.types.UptimeCheckConfig]): Required. If an ``updateMask`` has been specified, this field gives the - values for the set of fields mentioned in the ``updateMask``. If an - ``updateMask`` has not been given, this Uptime check configuration - replaces the current configuration. If a field is mentioned in - ``updateMask`` but the corresonding field is omitted in this partial - Uptime check configuration, it has the effect of deleting/clearing the - field from the configuration on the server. - - The following fields can be updated: ``display_name``, ``http_check``, - ``tcp_check``, ``timeout``, ``content_matchers``, and - ``selected_regions``. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.UptimeCheckConfig` - update_mask (Union[dict, ~google.cloud.monitoring_v3.types.FieldMask]): Optional. If present, only the listed fields in the current Uptime check - configuration are updated with values from the new configuration. If this - field is empty, then the current configuration is completely replaced with - the new configuration. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_v3.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. 
- - Returns: - A :class:`~google.cloud.monitoring_v3.types.UptimeCheckConfig` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. - if "update_uptime_check_config" not in self._inner_api_calls: - self._inner_api_calls[ - "update_uptime_check_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_uptime_check_config, - default_retry=self._method_configs["UpdateUptimeCheckConfig"].retry, - default_timeout=self._method_configs["UpdateUptimeCheckConfig"].timeout, - client_info=self._client_info, - ) - - request = uptime_service_pb2.UpdateUptimeCheckConfigRequest( - uptime_check_config=uptime_check_config, update_mask=update_mask - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("uptime_check_config.name", uptime_check_config.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_uptime_check_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_uptime_check_config( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes an Uptime check configuration. Note that this method will fail - if the Uptime check configuration is referenced by an alert policy or - other dependent configs that would be rendered invalid by the deletion. - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.UptimeCheckServiceClient() - >>> - >>> name = client.uptime_check_config_path('[PROJECT]', '[UPTIME_CHECK_CONFIG]') - >>> - >>> client.delete_uptime_check_config(name) - - Args: - name (str): The Uptime check configuration to delete. The format is - ``projects/[PROJECT_ID]/uptimeCheckConfigs/[UPTIME_CHECK_ID]``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. 
- if "delete_uptime_check_config" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_uptime_check_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_uptime_check_config, - default_retry=self._method_configs["DeleteUptimeCheckConfig"].retry, - default_timeout=self._method_configs["DeleteUptimeCheckConfig"].timeout, - client_info=self._client_info, - ) - - request = uptime_service_pb2.DeleteUptimeCheckConfigRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_uptime_check_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_uptime_check_ips( - self, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns the list of IP addresses that checkers run from - - Example: - >>> from google.cloud import monitoring_v3 - >>> - >>> client = monitoring_v3.UptimeCheckServiceClient() - >>> - >>> # Iterate over all results - >>> for element in client.list_uptime_check_ips(): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_uptime_check_ips().pages: - ... for element in page: - ... # process element - ... pass - - Args: - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.monitoring_v3.types.UptimeCheckIp` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - if metadata is None: - metadata = [] - metadata = list(metadata) - # Wrap the transport method to add retry and timeout logic. 
- if "list_uptime_check_ips" not in self._inner_api_calls: - self._inner_api_calls[ - "list_uptime_check_ips" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_uptime_check_ips, - default_retry=self._method_configs["ListUptimeCheckIps"].retry, - default_timeout=self._method_configs["ListUptimeCheckIps"].timeout, - client_info=self._client_info, - ) - - request = uptime_service_pb2.ListUptimeCheckIpsRequest(page_size=page_size) - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_uptime_check_ips"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="uptime_check_ips", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator diff --git a/monitoring/google/cloud/monitoring_v3/gapic/uptime_check_service_client_config.py b/monitoring/google/cloud/monitoring_v3/gapic/uptime_check_service_client_config.py deleted file mode 100644 index 0be9a06c1184..000000000000 --- a/monitoring/google/cloud/monitoring_v3/gapic/uptime_check_service_client_config.py +++ /dev/null @@ -1,53 +0,0 @@ -config = { - "interfaces": { - "google.monitoring.v3.UptimeCheckService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "ListUptimeCheckConfigs": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetUptimeCheckConfig": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateUptimeCheckConfig": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateUptimeCheckConfig": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DeleteUptimeCheckConfig": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListUptimeCheckIps": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/__init__.py b/monitoring/google/cloud/monitoring_v3/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/monitoring/google/cloud/monitoring_v3/proto/alert.proto b/monitoring/google/cloud/monitoring_v3/proto/alert.proto deleted file mode 100644 index fc811a01c48d..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/alert.proto +++ /dev/null @@ -1,339 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -syntax = "proto3"; - -package google.monitoring.v3; - -import "google/monitoring/v3/common.proto"; -import "google/monitoring/v3/mutation_record.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/wrappers.proto"; -import "google/rpc/status.proto"; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "AlertProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// A description of the conditions under which some aspect of your system is -// considered to be "unhealthy" and the ways to notify people or services about -// this state. For an overview of alert policies, see -// [Introduction to Alerting](/monitoring/alerts/). -message AlertPolicy { - // A content string and a MIME type that describes the content string's - // format. - message Documentation { - // The text of the documentation, interpreted according to `mime_type`. - // The content may not exceed 8,192 Unicode characters and may not exceed - // more than 10,240 bytes when encoded in UTF-8 format, whichever is - // smaller. - string content = 1; - - // The format of the `content` field. Presently, only the value - // `"text/markdown"` is supported. See - // [Markdown](https://en.wikipedia.org/wiki/Markdown) for more information. - string mime_type = 2; - } - - // A condition is a true/false test that determines when an alerting policy - // should open an incident. If a condition evaluates to true, it signifies - // that something is wrong. - message Condition { - // Specifies how many time series must fail a predicate to trigger a - // condition. If not specified, then a `{count: 1}` trigger is used. - message Trigger { - // A type of trigger. - oneof type { - // The absolute number of time series that must fail - // the predicate for the condition to be triggered. - int32 count = 1; - - // The percentage of time series that must fail the - // predicate for the condition to be triggered. - double percent = 2; - } - } - - // A condition type that compares a collection of time series - // against a threshold. - message MetricThreshold { - // A [filter](/monitoring/api/v3/filters) that - // identifies which time series should be compared with the threshold. - // - // The filter is similar to the one that is specified in the - // [`ListTimeSeries` - // request](/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list) (that - // call is useful to verify the time series that will be retrieved / - // processed) and must specify the metric type and optionally may contain - // restrictions on resource type, resource labels, and metric labels. - // This field may not exceed 2048 Unicode characters in length. - string filter = 2; - - // Specifies the alignment of data points in individual time series as - // well as how to combine the retrieved time series together (such as - // when aggregating multiple streams on each resource to a single - // stream for each resource or when aggregating streams across all - // members of a group of resrouces). Multiple aggregations - // are applied in the order specified. - // - // This field is similar to the one in the [`ListTimeSeries` - // request](/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list). It - // is advisable to use the `ListTimeSeries` method when debugging this - // field. 
- repeated Aggregation aggregations = 8; - - // A [filter](/monitoring/api/v3/filters) that identifies a time - // series that should be used as the denominator of a ratio that will be - // compared with the threshold. If a `denominator_filter` is specified, - // the time series specified by the `filter` field will be used as the - // numerator. - // - // The filter must specify the metric type and optionally may contain - // restrictions on resource type, resource labels, and metric labels. - // This field may not exceed 2048 Unicode characters in length. - string denominator_filter = 9; - - // Specifies the alignment of data points in individual time series - // selected by `denominatorFilter` as - // well as how to combine the retrieved time series together (such as - // when aggregating multiple streams on each resource to a single - // stream for each resource or when aggregating streams across all - // members of a group of resources). - // - // When computing ratios, the `aggregations` and - // `denominator_aggregations` fields must use the same alignment period - // and produce time series that have the same periodicity and labels. - repeated Aggregation denominator_aggregations = 10; - - // The comparison to apply between the time series (indicated by `filter` - // and `aggregation`) and the threshold (indicated by `threshold_value`). - // The comparison is applied on each time series, with the time series - // on the left-hand side and the threshold on the right-hand side. - // - // Only `COMPARISON_LT` and `COMPARISON_GT` are supported currently. - ComparisonType comparison = 4; - - // A value against which to compare the time series. - double threshold_value = 5; - - // The amount of time that a time series must violate the - // threshold to be considered failing. Currently, only values - // that are a multiple of a minute--e.g., 0, 60, 120, or 300 - // seconds--are supported. If an invalid value is given, an - // error will be returned. When choosing a duration, it is useful to - // keep in mind the frequency of the underlying time series data - // (which may also be affected by any alignments specified in the - // `aggregations` field); a good duration is long enough so that a single - // outlier does not generate spurious alerts, but short enough that - // unhealthy states are detected and alerted on quickly. - google.protobuf.Duration duration = 6; - - // The number/percent of time series for which the comparison must hold - // in order for the condition to trigger. If unspecified, then the - // condition will trigger if the comparison is true for any of the - // time series that have been identified by `filter` and `aggregations`, - // or by the ratio, if `denominator_filter` and `denominator_aggregations` - // are specified. - Trigger trigger = 7; - } - - // A condition type that checks that monitored resources - // are reporting data. The configuration defines a metric and - // a set of monitored resources. The predicate is considered in violation - // when a time series for the specified metric of a monitored - // resource does not include any data in the specified `duration`. - message MetricAbsence { - // A [filter](/monitoring/api/v3/filters) that - // identifies which time series should be compared with the threshold. 
- // - // The filter is similar to the one that is specified in the - // [`ListTimeSeries` - // request](/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list) (that - // call is useful to verify the time series that will be retrieved / - // processed) and must specify the metric type and optionally may contain - // restrictions on resource type, resource labels, and metric labels. - // This field may not exceed 2048 Unicode characters in length. - string filter = 1; - - // Specifies the alignment of data points in individual time series as - // well as how to combine the retrieved time series together (such as - // when aggregating multiple streams on each resource to a single - // stream for each resource or when aggregating streams across all - // members of a group of resrouces). Multiple aggregations - // are applied in the order specified. - // - // This field is similar to the one in the [`ListTimeSeries` - // request](/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list). It - // is advisable to use the `ListTimeSeries` method when debugging this - // field. - repeated Aggregation aggregations = 5; - - // The amount of time that a time series must fail to report new - // data to be considered failing. Currently, only values that - // are a multiple of a minute--e.g. 60, 120, or 300 - // seconds--are supported. If an invalid value is given, an - // error will be returned. The `Duration.nanos` field is - // ignored. - google.protobuf.Duration duration = 2; - - // The number/percent of time series for which the comparison must hold - // in order for the condition to trigger. If unspecified, then the - // condition will trigger if the comparison is true for any of the - // time series that have been identified by `filter` and `aggregations`. - Trigger trigger = 3; - } - - // Required if the condition exists. The unique resource name for this - // condition. Its syntax is: - // - // projects/[PROJECT_ID]/alertPolicies/[POLICY_ID]/conditions/[CONDITION_ID] - // - // `[CONDITION_ID]` is assigned by Stackdriver Monitoring when the - // condition is created as part of a new or updated alerting policy. - // - // When calling the - // [alertPolicies.create][google.monitoring.v3.AlertPolicyService.CreateAlertPolicy] - // method, do not include the `name` field in the conditions of the - // requested alerting policy. Stackdriver Monitoring creates the - // condition identifiers and includes them in the new policy. - // - // When calling the - // [alertPolicies.update][google.monitoring.v3.AlertPolicyService.UpdateAlertPolicy] - // method to update a policy, including a condition `name` causes the - // existing condition to be updated. Conditions without names are added to - // the updated policy. Existing conditions are deleted if they are not - // updated. - // - // Best practice is to preserve `[CONDITION_ID]` if you make only small - // changes, such as those to condition thresholds, durations, or trigger - // values. Otherwise, treat the change as a new condition and let the - // existing condition be deleted. - string name = 12; - - // A short name or phrase used to identify the condition in dashboards, - // notifications, and incidents. To avoid confusion, don't use the same - // display name for multiple conditions in the same policy. - string display_name = 6; - - // Only one of the following condition types will be specified. - oneof condition { - // A condition that compares a time series against a threshold. 
- MetricThreshold condition_threshold = 1; - - // A condition that checks that a time series continues to - // receive new data points. - MetricAbsence condition_absent = 2; - } - } - - // Operators for combining conditions. - enum ConditionCombinerType { - // An unspecified combiner. - COMBINE_UNSPECIFIED = 0; - - // Combine conditions using the logical `AND` operator. An - // incident is created only if all conditions are met - // simultaneously. This combiner is satisfied if all conditions are - // met, even if they are met on completely different resources. - AND = 1; - - // Combine conditions using the logical `OR` operator. An incident - // is created if any of the listed conditions is met. - OR = 2; - - // Combine conditions using logical `AND` operator, but unlike the regular - // `AND` option, an incident is created only if all conditions are met - // simultaneously on at least one resource. - AND_WITH_MATCHING_RESOURCE = 3; - } - - // Required if the policy exists. The resource name for this policy. The - // syntax is: - // - // projects/[PROJECT_ID]/alertPolicies/[ALERT_POLICY_ID] - // - // `[ALERT_POLICY_ID]` is assigned by Stackdriver Monitoring when the policy - // is created. When calling the - // [alertPolicies.create][google.monitoring.v3.AlertPolicyService.CreateAlertPolicy] - // method, do not include the `name` field in the alerting policy passed as - // part of the request. - string name = 1; - - // A short name or phrase used to identify the policy in dashboards, - // notifications, and incidents. To avoid confusion, don't use the same - // display name for multiple policies in the same project. The name is - // limited to 512 Unicode characters. - string display_name = 2; - - // Documentation that is included with notifications and incidents related to - // this policy. Best practice is for the documentation to include information - // to help responders understand, mitigate, escalate, and correct the - // underlying problems detected by the alerting policy. Notification channels - // that have limited capacity might not show this documentation. - Documentation documentation = 13; - - // User-supplied key/value data to be used for organizing and - // identifying the `AlertPolicy` objects. - // - // The field can contain up to 64 entries. Each key and value is limited to - // 63 Unicode characters or 128 bytes, whichever is smaller. Labels and - // values can contain only lowercase letters, numerals, underscores, and - // dashes. Keys must begin with a letter. - map user_labels = 16; - - // A list of conditions for the policy. The conditions are combined by AND or - // OR according to the `combiner` field. If the combined conditions evaluate - // to true, then an incident is created. A policy can have from one to six - // conditions. - repeated Condition conditions = 12; - - // How to combine the results of multiple conditions to determine if an - // incident should be opened. - ConditionCombinerType combiner = 6; - - // Whether or not the policy is enabled. On write, the default interpretation - // if unset is that the policy is enabled. On read, clients should not make - // any assumption about the state if it has not been populated. The - // field should always be populated on List and Get operations, unless - // a field projection has been specified that strips it out. - google.protobuf.BoolValue enabled = 17; - - // Read-only description of how the alert policy is invalid. OK if the alert - // policy is valid. 
If not OK, the alert policy will not generate incidents. - google.rpc.Status validity = 18; - - // Identifies the notification channels to which notifications should be sent - // when incidents are opened or closed or when new violations occur on - // an already opened incident. Each element of this array corresponds to - // the `name` field in each of the - // [`NotificationChannel`][google.monitoring.v3.NotificationChannel] - // objects that are returned from the [`ListNotificationChannels`] - // [google.monitoring.v3.NotificationChannelService.ListNotificationChannels] - // method. The syntax of the entries in this field is: - // - // projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID] - repeated string notification_channels = 14; - - // A read-only record of the creation of the alerting policy. If provided - // in a call to create or update, this field will be ignored. - MutationRecord creation_record = 10; - - // A read-only record of the most recent change to the alerting policy. If - // provided in a call to create or update, this field will be ignored. - MutationRecord mutation_record = 11; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/alert_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/alert_pb2.py deleted file mode 100644 index 203b7f523d70..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/alert_pb2.py +++ /dev/null @@ -1,1276 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring_v3/proto/alert.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.cloud.monitoring_v3.proto import ( - common_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2, -) -from google.cloud.monitoring_v3.proto import ( - mutation_record_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_mutation__record__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/alert.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\nAlertProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n,google/cloud/monitoring_v3/proto/alert.proto\x12\x14google.monitoring.v3\x1a-google/cloud/monitoring_v3/proto/common.proto\x1a\x36google/cloud/monitoring_v3/proto/mutation_record.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x17google/rpc/status.proto"\x83\r\n\x0b\x41lertPolicy\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x46\n\rdocumentation\x18\r \x01(\x0b\x32/.google.monitoring.v3.AlertPolicy.Documentation\x12\x46\n\x0buser_labels\x18\x10 \x03(\x0b\x32\x31.google.monitoring.v3.AlertPolicy.UserLabelsEntry\x12?\n\nconditions\x18\x0c 
\x03(\x0b\x32+.google.monitoring.v3.AlertPolicy.Condition\x12I\n\x08\x63ombiner\x18\x06 \x01(\x0e\x32\x37.google.monitoring.v3.AlertPolicy.ConditionCombinerType\x12+\n\x07\x65nabled\x18\x11 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12$\n\x08validity\x18\x12 \x01(\x0b\x32\x12.google.rpc.Status\x12\x1d\n\x15notification_channels\x18\x0e \x03(\t\x12=\n\x0f\x63reation_record\x18\n \x01(\x0b\x32$.google.monitoring.v3.MutationRecord\x12=\n\x0fmutation_record\x18\x0b \x01(\x0b\x32$.google.monitoring.v3.MutationRecord\x1a\x33\n\rDocumentation\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x11\n\tmime_type\x18\x02 \x01(\t\x1a\xf8\x06\n\tCondition\x12\x0c\n\x04name\x18\x0c \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x06 \x01(\t\x12Z\n\x13\x63ondition_threshold\x18\x01 \x01(\x0b\x32;.google.monitoring.v3.AlertPolicy.Condition.MetricThresholdH\x00\x12U\n\x10\x63ondition_absent\x18\x02 \x01(\x0b\x32\x39.google.monitoring.v3.AlertPolicy.Condition.MetricAbsenceH\x00\x1a\x35\n\x07Trigger\x12\x0f\n\x05\x63ount\x18\x01 \x01(\x05H\x00\x12\x11\n\x07percent\x18\x02 \x01(\x01H\x00\x42\x06\n\x04type\x1a\x81\x03\n\x0fMetricThreshold\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x37\n\x0c\x61ggregations\x18\x08 \x03(\x0b\x32!.google.monitoring.v3.Aggregation\x12\x1a\n\x12\x64\x65nominator_filter\x18\t \x01(\t\x12\x43\n\x18\x64\x65nominator_aggregations\x18\n \x03(\x0b\x32!.google.monitoring.v3.Aggregation\x12\x38\n\ncomparison\x18\x04 \x01(\x0e\x32$.google.monitoring.v3.ComparisonType\x12\x17\n\x0fthreshold_value\x18\x05 \x01(\x01\x12+\n\x08\x64uration\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x44\n\x07trigger\x18\x07 \x01(\x0b\x32\x33.google.monitoring.v3.AlertPolicy.Condition.Trigger\x1a\xcb\x01\n\rMetricAbsence\x12\x0e\n\x06\x66ilter\x18\x01 \x01(\t\x12\x37\n\x0c\x61ggregations\x18\x05 \x03(\x0b\x32!.google.monitoring.v3.Aggregation\x12+\n\x08\x64uration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x44\n\x07trigger\x18\x03 \x01(\x0b\x32\x33.google.monitoring.v3.AlertPolicy.Condition.TriggerB\x0b\n\tcondition\x1a\x31\n\x0fUserLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"a\n\x15\x43onditionCombinerType\x12\x17\n\x13\x43OMBINE_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\x12\x06\n\x02OR\x10\x02\x12\x1e\n\x1a\x41ND_WITH_MATCHING_RESOURCE\x10\x03\x42\xa2\x01\n\x18\x63om.google.monitoring.v3B\nAlertProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), - dependencies=[ - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_mutation__record__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - ], -) - - -_ALERTPOLICY_CONDITIONCOMBINERTYPE = _descriptor.EnumDescriptor( - name="ConditionCombinerType", - full_name="google.monitoring.v3.AlertPolicy.ConditionCombinerType", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="COMBINE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="AND", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="OR", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="AND_WITH_MATCHING_RESOURCE", - index=3, - number=3, - 
serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1833, - serialized_end=1930, -) -_sym_db.RegisterEnumDescriptor(_ALERTPOLICY_CONDITIONCOMBINERTYPE) - - -_ALERTPOLICY_DOCUMENTATION = _descriptor.Descriptor( - name="Documentation", - full_name="google.monitoring.v3.AlertPolicy.Documentation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="content", - full_name="google.monitoring.v3.AlertPolicy.Documentation.content", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="mime_type", - full_name="google.monitoring.v3.AlertPolicy.Documentation.mime_type", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=838, - serialized_end=889, -) - -_ALERTPOLICY_CONDITION_TRIGGER = _descriptor.Descriptor( - name="Trigger", - full_name="google.monitoring.v3.AlertPolicy.Condition.Trigger", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="count", - full_name="google.monitoring.v3.AlertPolicy.Condition.Trigger.count", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="percent", - full_name="google.monitoring.v3.AlertPolicy.Condition.Trigger.percent", - index=1, - number=2, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type", - full_name="google.monitoring.v3.AlertPolicy.Condition.Trigger.type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=1120, - serialized_end=1173, -) - -_ALERTPOLICY_CONDITION_METRICTHRESHOLD = _descriptor.Descriptor( - name="MetricThreshold", - full_name="google.monitoring.v3.AlertPolicy.Condition.MetricThreshold", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="filter", - full_name="google.monitoring.v3.AlertPolicy.Condition.MetricThreshold.filter", - index=0, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="aggregations", - 
full_name="google.monitoring.v3.AlertPolicy.Condition.MetricThreshold.aggregations", - index=1, - number=8, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="denominator_filter", - full_name="google.monitoring.v3.AlertPolicy.Condition.MetricThreshold.denominator_filter", - index=2, - number=9, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="denominator_aggregations", - full_name="google.monitoring.v3.AlertPolicy.Condition.MetricThreshold.denominator_aggregations", - index=3, - number=10, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="comparison", - full_name="google.monitoring.v3.AlertPolicy.Condition.MetricThreshold.comparison", - index=4, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="threshold_value", - full_name="google.monitoring.v3.AlertPolicy.Condition.MetricThreshold.threshold_value", - index=5, - number=5, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="duration", - full_name="google.monitoring.v3.AlertPolicy.Condition.MetricThreshold.duration", - index=6, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="trigger", - full_name="google.monitoring.v3.AlertPolicy.Condition.MetricThreshold.trigger", - index=7, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1176, - serialized_end=1561, -) - -_ALERTPOLICY_CONDITION_METRICABSENCE = _descriptor.Descriptor( - name="MetricAbsence", - full_name="google.monitoring.v3.AlertPolicy.Condition.MetricAbsence", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="filter", - full_name="google.monitoring.v3.AlertPolicy.Condition.MetricAbsence.filter", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="aggregations", - full_name="google.monitoring.v3.AlertPolicy.Condition.MetricAbsence.aggregations", - index=1, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="duration", - full_name="google.monitoring.v3.AlertPolicy.Condition.MetricAbsence.duration", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="trigger", - full_name="google.monitoring.v3.AlertPolicy.Condition.MetricAbsence.trigger", - index=3, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1564, - serialized_end=1767, -) - -_ALERTPOLICY_CONDITION = _descriptor.Descriptor( - name="Condition", - full_name="google.monitoring.v3.AlertPolicy.Condition", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.AlertPolicy.Condition.name", - index=0, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.monitoring.v3.AlertPolicy.Condition.display_name", - index=1, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="condition_threshold", - full_name="google.monitoring.v3.AlertPolicy.Condition.condition_threshold", - index=2, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="condition_absent", - full_name="google.monitoring.v3.AlertPolicy.Condition.condition_absent", - index=3, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _ALERTPOLICY_CONDITION_TRIGGER, - _ALERTPOLICY_CONDITION_METRICTHRESHOLD, - _ALERTPOLICY_CONDITION_METRICABSENCE, - ], - 
enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="condition", - full_name="google.monitoring.v3.AlertPolicy.Condition.condition", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=892, - serialized_end=1780, -) - -_ALERTPOLICY_USERLABELSENTRY = _descriptor.Descriptor( - name="UserLabelsEntry", - full_name="google.monitoring.v3.AlertPolicy.UserLabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.monitoring.v3.AlertPolicy.UserLabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.monitoring.v3.AlertPolicy.UserLabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1782, - serialized_end=1831, -) - -_ALERTPOLICY = _descriptor.Descriptor( - name="AlertPolicy", - full_name="google.monitoring.v3.AlertPolicy", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.AlertPolicy.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.monitoring.v3.AlertPolicy.display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="documentation", - full_name="google.monitoring.v3.AlertPolicy.documentation", - index=2, - number=13, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="user_labels", - full_name="google.monitoring.v3.AlertPolicy.user_labels", - index=3, - number=16, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="conditions", - full_name="google.monitoring.v3.AlertPolicy.conditions", - index=4, - number=12, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="combiner", - full_name="google.monitoring.v3.AlertPolicy.combiner", - index=5, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="enabled", - full_name="google.monitoring.v3.AlertPolicy.enabled", - index=6, - number=17, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="validity", - full_name="google.monitoring.v3.AlertPolicy.validity", - index=7, - number=18, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="notification_channels", - full_name="google.monitoring.v3.AlertPolicy.notification_channels", - index=8, - number=14, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="creation_record", - full_name="google.monitoring.v3.AlertPolicy.creation_record", - index=9, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="mutation_record", - full_name="google.monitoring.v3.AlertPolicy.mutation_record", - index=10, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _ALERTPOLICY_DOCUMENTATION, - _ALERTPOLICY_CONDITION, - _ALERTPOLICY_USERLABELSENTRY, - ], - enum_types=[_ALERTPOLICY_CONDITIONCOMBINERTYPE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=263, - serialized_end=1930, -) - -_ALERTPOLICY_DOCUMENTATION.containing_type = _ALERTPOLICY -_ALERTPOLICY_CONDITION_TRIGGER.containing_type = _ALERTPOLICY_CONDITION -_ALERTPOLICY_CONDITION_TRIGGER.oneofs_by_name["type"].fields.append( - _ALERTPOLICY_CONDITION_TRIGGER.fields_by_name["count"] -) -_ALERTPOLICY_CONDITION_TRIGGER.fields_by_name[ - "count" -].containing_oneof = _ALERTPOLICY_CONDITION_TRIGGER.oneofs_by_name["type"] -_ALERTPOLICY_CONDITION_TRIGGER.oneofs_by_name["type"].fields.append( - _ALERTPOLICY_CONDITION_TRIGGER.fields_by_name["percent"] -) -_ALERTPOLICY_CONDITION_TRIGGER.fields_by_name[ - "percent" -].containing_oneof = _ALERTPOLICY_CONDITION_TRIGGER.oneofs_by_name["type"] -_ALERTPOLICY_CONDITION_METRICTHRESHOLD.fields_by_name[ - "aggregations" 
-].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2._AGGREGATION -) -_ALERTPOLICY_CONDITION_METRICTHRESHOLD.fields_by_name[ - "denominator_aggregations" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2._AGGREGATION -) -_ALERTPOLICY_CONDITION_METRICTHRESHOLD.fields_by_name[ - "comparison" -].enum_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2._COMPARISONTYPE -) -_ALERTPOLICY_CONDITION_METRICTHRESHOLD.fields_by_name[ - "duration" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_ALERTPOLICY_CONDITION_METRICTHRESHOLD.fields_by_name[ - "trigger" -].message_type = _ALERTPOLICY_CONDITION_TRIGGER -_ALERTPOLICY_CONDITION_METRICTHRESHOLD.containing_type = _ALERTPOLICY_CONDITION -_ALERTPOLICY_CONDITION_METRICABSENCE.fields_by_name[ - "aggregations" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2._AGGREGATION -) -_ALERTPOLICY_CONDITION_METRICABSENCE.fields_by_name[ - "duration" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_ALERTPOLICY_CONDITION_METRICABSENCE.fields_by_name[ - "trigger" -].message_type = _ALERTPOLICY_CONDITION_TRIGGER -_ALERTPOLICY_CONDITION_METRICABSENCE.containing_type = _ALERTPOLICY_CONDITION -_ALERTPOLICY_CONDITION.fields_by_name[ - "condition_threshold" -].message_type = _ALERTPOLICY_CONDITION_METRICTHRESHOLD -_ALERTPOLICY_CONDITION.fields_by_name[ - "condition_absent" -].message_type = _ALERTPOLICY_CONDITION_METRICABSENCE -_ALERTPOLICY_CONDITION.containing_type = _ALERTPOLICY -_ALERTPOLICY_CONDITION.oneofs_by_name["condition"].fields.append( - _ALERTPOLICY_CONDITION.fields_by_name["condition_threshold"] -) -_ALERTPOLICY_CONDITION.fields_by_name[ - "condition_threshold" -].containing_oneof = _ALERTPOLICY_CONDITION.oneofs_by_name["condition"] -_ALERTPOLICY_CONDITION.oneofs_by_name["condition"].fields.append( - _ALERTPOLICY_CONDITION.fields_by_name["condition_absent"] -) -_ALERTPOLICY_CONDITION.fields_by_name[ - "condition_absent" -].containing_oneof = _ALERTPOLICY_CONDITION.oneofs_by_name["condition"] -_ALERTPOLICY_USERLABELSENTRY.containing_type = _ALERTPOLICY -_ALERTPOLICY.fields_by_name["documentation"].message_type = _ALERTPOLICY_DOCUMENTATION -_ALERTPOLICY.fields_by_name["user_labels"].message_type = _ALERTPOLICY_USERLABELSENTRY -_ALERTPOLICY.fields_by_name["conditions"].message_type = _ALERTPOLICY_CONDITION -_ALERTPOLICY.fields_by_name["combiner"].enum_type = _ALERTPOLICY_CONDITIONCOMBINERTYPE -_ALERTPOLICY.fields_by_name[ - "enabled" -].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE -_ALERTPOLICY.fields_by_name[ - "validity" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_ALERTPOLICY.fields_by_name[ - "creation_record" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_mutation__record__pb2._MUTATIONRECORD -) -_ALERTPOLICY.fields_by_name[ - "mutation_record" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_mutation__record__pb2._MUTATIONRECORD -) -_ALERTPOLICY_CONDITIONCOMBINERTYPE.containing_type = _ALERTPOLICY -DESCRIPTOR.message_types_by_name["AlertPolicy"] = _ALERTPOLICY -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -AlertPolicy = _reflection.GeneratedProtocolMessageType( - "AlertPolicy", - (_message.Message,), - dict( - Documentation=_reflection.GeneratedProtocolMessageType( - "Documentation", - (_message.Message,), - dict( - DESCRIPTOR=_ALERTPOLICY_DOCUMENTATION, - __module__="google.cloud.monitoring_v3.proto.alert_pb2", - 
__doc__="""A content string and a MIME type that describes the - content string's format. - - - Attributes: - content: - The text of the documentation, interpreted according to - ``mime_type``. The content may not exceed 8,192 Unicode - characters and may not exceed more than 10,240 bytes when - encoded in UTF-8 format, whichever is smaller. - mime_type: - The format of the ``content`` field. Presently, only the value - ``"text/markdown"`` is supported. See `Markdown - `__ for more - information. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.AlertPolicy.Documentation) - ), - ), - Condition=_reflection.GeneratedProtocolMessageType( - "Condition", - (_message.Message,), - dict( - Trigger=_reflection.GeneratedProtocolMessageType( - "Trigger", - (_message.Message,), - dict( - DESCRIPTOR=_ALERTPOLICY_CONDITION_TRIGGER, - __module__="google.cloud.monitoring_v3.proto.alert_pb2", - __doc__="""Specifies how many time series must fail a predicate to - trigger a condition. If not specified, then a ``{count: 1}`` trigger is - used. - - - Attributes: - type: - A type of trigger. - count: - The absolute number of time series that must fail the - predicate for the condition to be triggered. - percent: - The percentage of time series that must fail the predicate for - the condition to be triggered. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.AlertPolicy.Condition.Trigger) - ), - ), - MetricThreshold=_reflection.GeneratedProtocolMessageType( - "MetricThreshold", - (_message.Message,), - dict( - DESCRIPTOR=_ALERTPOLICY_CONDITION_METRICTHRESHOLD, - __module__="google.cloud.monitoring_v3.proto.alert_pb2", - __doc__="""A condition type that compares a collection of time series - against a threshold. - - - Attributes: - filter: - A `filter `__ that identifies - which time series should be compared with the threshold. The - filter is similar to the one that is specified in the - ```ListTimeSeries`` request - `__ - (that call is useful to verify the time series that will be - retrieved / processed) and must specify the metric type and - optionally may contain restrictions on resource type, resource - labels, and metric labels. This field may not exceed 2048 - Unicode characters in length. - aggregations: - Specifies the alignment of data points in individual time - series as well as how to combine the retrieved time series - together (such as when aggregating multiple streams on each - resource to a single stream for each resource or when - aggregating streams across all members of a group of - resrouces). Multiple aggregations are applied in the order - specified. This field is similar to the one in the - ```ListTimeSeries`` request - `__. - It is advisable to use the ``ListTimeSeries`` method when - debugging this field. - denominator_filter: - A `filter `__ that identifies a - time series that should be used as the denominator of a ratio - that will be compared with the threshold. If a - ``denominator_filter`` is specified, the time series specified - by the ``filter`` field will be used as the numerator. The - filter must specify the metric type and optionally may contain - restrictions on resource type, resource labels, and metric - labels. This field may not exceed 2048 Unicode characters in - length. 
- denominator_aggregations: - Specifies the alignment of data points in individual time - series selected by ``denominatorFilter`` as well as how to - combine the retrieved time series together (such as when - aggregating multiple streams on each resource to a single - stream for each resource or when aggregating streams across - all members of a group of resources). When computing ratios, - the ``aggregations`` and ``denominator_aggregations`` fields - must use the same alignment period and produce time series - that have the same periodicity and labels. - comparison: - The comparison to apply between the time series (indicated by - ``filter`` and ``aggregation``) and the threshold (indicated - by ``threshold_value``). The comparison is applied on each - time series, with the time series on the left-hand side and - the threshold on the right-hand side. Only ``COMPARISON_LT`` - and ``COMPARISON_GT`` are supported currently. - threshold_value: - A value against which to compare the time series. - duration: - The amount of time that a time series must violate the - threshold to be considered failing. Currently, only values - that are a multiple of a minute--e.g., 0, 60, 120, or 300 - seconds--are supported. If an invalid value is given, an error - will be returned. When choosing a duration, it is useful to - keep in mind the frequency of the underlying time series data - (which may also be affected by any alignments specified in the - ``aggregations`` field); a good duration is long enough so - that a single outlier does not generate spurious alerts, but - short enough that unhealthy states are detected and alerted on - quickly. - trigger: - The number/percent of time series for which the comparison - must hold in order for the condition to trigger. If - unspecified, then the condition will trigger if the comparison - is true for any of the time series that have been identified - by ``filter`` and ``aggregations``, or by the ratio, if - ``denominator_filter`` and ``denominator_aggregations`` are - specified. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.AlertPolicy.Condition.MetricThreshold) - ), - ), - MetricAbsence=_reflection.GeneratedProtocolMessageType( - "MetricAbsence", - (_message.Message,), - dict( - DESCRIPTOR=_ALERTPOLICY_CONDITION_METRICABSENCE, - __module__="google.cloud.monitoring_v3.proto.alert_pb2", - __doc__="""A condition type that checks that monitored resources are - reporting data. The configuration defines a metric and a set of - monitored resources. The predicate is considered in violation when a - time series for the specified metric of a monitored resource does not - include any data in the specified ``duration``. - - - Attributes: - filter: - A `filter `__ that identifies - which time series should be compared with the threshold. The - filter is similar to the one that is specified in the - ```ListTimeSeries`` request - `__ - (that call is useful to verify the time series that will be - retrieved / processed) and must specify the metric type and - optionally may contain restrictions on resource type, resource - labels, and metric labels. This field may not exceed 2048 - Unicode characters in length. - aggregations: - Specifies the alignment of data points in individual time - series as well as how to combine the retrieved time series - together (such as when aggregating multiple streams on each - resource to a single stream for each resource or when - aggregating streams across all members of a group of - resrouces). 
Multiple aggregations are applied in the order - specified. This field is similar to the one in the - ```ListTimeSeries`` request - `__. - It is advisable to use the ``ListTimeSeries`` method when - debugging this field. - duration: - The amount of time that a time series must fail to report new - data to be considered failing. Currently, only values that are - a multiple of a minute--e.g. 60, 120, or 300 seconds--are - supported. If an invalid value is given, an error will be - returned. The ``Duration.nanos`` field is ignored. - trigger: - The number/percent of time series for which the comparison - must hold in order for the condition to trigger. If - unspecified, then the condition will trigger if the comparison - is true for any of the time series that have been identified - by ``filter`` and ``aggregations``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.AlertPolicy.Condition.MetricAbsence) - ), - ), - DESCRIPTOR=_ALERTPOLICY_CONDITION, - __module__="google.cloud.monitoring_v3.proto.alert_pb2", - __doc__="""A condition is a true/false test that determines when an - alerting policy should open an incident. If a condition evaluates to - true, it signifies that something is wrong. - - - Attributes: - name: - Required if the condition exists. The unique resource name for - this condition. Its syntax is: :: projects/[PROJECT_ID]/ - alertPolicies/[POLICY_ID]/conditions/[CONDITION_ID] - ``[CONDITION_ID]`` is assigned by Stackdriver Monitoring when - the condition is created as part of a new or updated alerting - policy. When calling the [alertPolicies.create][google.monito - ring.v3.AlertPolicyService.CreateAlertPolicy] method, do not - include the ``name`` field in the conditions of the requested - alerting policy. Stackdriver Monitoring creates the condition - identifiers and includes them in the new policy. When calling - the [alertPolicies.update][google.monitoring.v3.AlertPolicySer - vice.UpdateAlertPolicy] method to update a policy, including a - condition ``name`` causes the existing condition to be - updated. Conditions without names are added to the updated - policy. Existing conditions are deleted if they are not - updated. Best practice is to preserve ``[CONDITION_ID]`` if - you make only small changes, such as those to condition - thresholds, durations, or trigger values. Otherwise, treat the - change as a new condition and let the existing condition be - deleted. - display_name: - A short name or phrase used to identify the condition in - dashboards, notifications, and incidents. To avoid confusion, - don't use the same display name for multiple conditions in the - same policy. - condition: - Only one of the following condition types will be specified. - condition_threshold: - A condition that compares a time series against a threshold. - condition_absent: - A condition that checks that a time series continues to - receive new data points. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.AlertPolicy.Condition) - ), - ), - UserLabelsEntry=_reflection.GeneratedProtocolMessageType( - "UserLabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_ALERTPOLICY_USERLABELSENTRY, - __module__="google.cloud.monitoring_v3.proto.alert_pb2" - # @@protoc_insertion_point(class_scope:google.monitoring.v3.AlertPolicy.UserLabelsEntry) - ), - ), - DESCRIPTOR=_ALERTPOLICY, - __module__="google.cloud.monitoring_v3.proto.alert_pb2", - __doc__="""A description of the conditions under which some aspect of - your system is considered to be "unhealthy" and the ways to notify - people or services about this state. For an overview of alert policies, - see `Introduction to Alerting `__. - - - Attributes: - name: - Required if the policy exists. The resource name for this - policy. The syntax is: :: - projects/[PROJECT_ID]/alertPolicies/[ALERT_POLICY_ID] - ``[ALERT_POLICY_ID]`` is assigned by Stackdriver Monitoring - when the policy is created. When calling the [alertPolicies.cr - eate][google.monitoring.v3.AlertPolicyService.CreateAlertPolic - y] method, do not include the ``name`` field in the alerting - policy passed as part of the request. - display_name: - A short name or phrase used to identify the policy in - dashboards, notifications, and incidents. To avoid confusion, - don't use the same display name for multiple policies in the - same project. The name is limited to 512 Unicode characters. - documentation: - Documentation that is included with notifications and - incidents related to this policy. Best practice is for the - documentation to include information to help responders - understand, mitigate, escalate, and correct the underlying - problems detected by the alerting policy. Notification - channels that have limited capacity might not show this - documentation. - user_labels: - User-supplied key/value data to be used for organizing and - identifying the ``AlertPolicy`` objects. The field can - contain up to 64 entries. Each key and value is limited to 63 - Unicode characters or 128 bytes, whichever is smaller. Labels - and values can contain only lowercase letters, numerals, - underscores, and dashes. Keys must begin with a letter. - conditions: - A list of conditions for the policy. The conditions are - combined by AND or OR according to the ``combiner`` field. If - the combined conditions evaluate to true, then an incident is - created. A policy can have from one to six conditions. - combiner: - How to combine the results of multiple conditions to determine - if an incident should be opened. - enabled: - Whether or not the policy is enabled. On write, the default - interpretation if unset is that the policy is enabled. On - read, clients should not make any assumption about the state - if it has not been populated. The field should always be - populated on List and Get operations, unless a field - projection has been specified that strips it out. - validity: - Read-only description of how the alert policy is invalid. OK - if the alert policy is valid. If not OK, the alert policy will - not generate incidents. - notification_channels: - Identifies the notification channels to which notifications - should be sent when incidents are opened or closed or when new - violations occur on an already opened incident. 
Each element - of this array corresponds to the ``name`` field in each of the - [``NotificationChannel``][google.monitoring.v3.NotificationCha - nnel] objects that are returned from the [``ListNotificationCh - annels``][google.monitoring.v3.NotificationChannelService.List - NotificationChannels] method. The syntax of the entries in - this field is: :: - projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID] - creation_record: - A read-only record of the creation of the alerting policy. If - provided in a call to create or update, this field will be - ignored. - mutation_record: - A read-only record of the most recent change to the alerting - policy. If provided in a call to create or update, this field - will be ignored. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.AlertPolicy) - ), -) -_sym_db.RegisterMessage(AlertPolicy) -_sym_db.RegisterMessage(AlertPolicy.Documentation) -_sym_db.RegisterMessage(AlertPolicy.Condition) -_sym_db.RegisterMessage(AlertPolicy.Condition.Trigger) -_sym_db.RegisterMessage(AlertPolicy.Condition.MetricThreshold) -_sym_db.RegisterMessage(AlertPolicy.Condition.MetricAbsence) -_sym_db.RegisterMessage(AlertPolicy.UserLabelsEntry) - - -DESCRIPTOR._options = None -_ALERTPOLICY_USERLABELSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/alert_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/alert_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/alert_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/monitoring/google/cloud/monitoring_v3/proto/alert_service.proto b/monitoring/google/cloud/monitoring_v3/proto/alert_service.proto deleted file mode 100644 index de5a30f02938..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/alert_service.proto +++ /dev/null @@ -1,204 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.monitoring.v3; - -import "google/api/annotations.proto"; -import "google/monitoring/v3/alert.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/api/client.proto"; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "AlertServiceProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// The AlertPolicyService API is used to manage (list, create, delete, -// edit) alert policies in Stackdriver Monitoring. An alerting policy is -// a description of the conditions under which some aspect of your -// system is considered to be "unhealthy" and the ways to notify -// people or services about this state. 
In addition to using this API, alert -// policies can also be managed through -// [Stackdriver Monitoring](https://cloud.google.com/monitoring/docs/), -// which can be reached by clicking the "Monitoring" tab in -// [Cloud Console](https://console.cloud.google.com/). -service AlertPolicyService { - option (google.api.default_host) = "monitoring.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/monitoring," - "https://www.googleapis.com/auth/monitoring.read"; - - // Lists the existing alerting policies for the project. - rpc ListAlertPolicies(ListAlertPoliciesRequest) returns (ListAlertPoliciesResponse) { - option (google.api.http) = { - get: "/v3/{name=projects/*}/alertPolicies" - }; - } - - // Gets a single alerting policy. - rpc GetAlertPolicy(GetAlertPolicyRequest) returns (AlertPolicy) { - option (google.api.http) = { - get: "/v3/{name=projects/*/alertPolicies/*}" - }; - } - - // Creates a new alerting policy. - rpc CreateAlertPolicy(CreateAlertPolicyRequest) returns (AlertPolicy) { - option (google.api.http) = { - post: "/v3/{name=projects/*}/alertPolicies" - body: "alert_policy" - }; - } - - // Deletes an alerting policy. - rpc DeleteAlertPolicy(DeleteAlertPolicyRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v3/{name=projects/*/alertPolicies/*}" - }; - } - - // Updates an alerting policy. You can either replace the entire policy with - // a new one or replace only certain fields in the current alerting policy by - // specifying the fields to be updated via `updateMask`. Returns the - // updated alerting policy. - rpc UpdateAlertPolicy(UpdateAlertPolicyRequest) returns (AlertPolicy) { - option (google.api.http) = { - patch: "/v3/{alert_policy.name=projects/*/alertPolicies/*}" - body: "alert_policy" - }; - } -} - -// The protocol for the `CreateAlertPolicy` request. -message CreateAlertPolicyRequest { - // The project in which to create the alerting policy. The format is - // `projects/[PROJECT_ID]`. - // - // Note that this field names the parent container in which the alerting - // policy will be written, not the name of the created policy. The alerting - // policy that is returned will have a name that contains a normalized - // representation of this name as a prefix but adds a suffix of the form - // `/alertPolicies/[POLICY_ID]`, identifying the policy in the container. - string name = 3; - - // The requested alerting policy. You should omit the `name` field in this - // policy. The name will be returned in the new policy, including - // a new [ALERT_POLICY_ID] value. - AlertPolicy alert_policy = 2; -} - -// The protocol for the `GetAlertPolicy` request. -message GetAlertPolicyRequest { - // The alerting policy to retrieve. The format is - // - // projects/[PROJECT_ID]/alertPolicies/[ALERT_POLICY_ID] - string name = 3; -} - -// The protocol for the `ListAlertPolicies` request. -message ListAlertPoliciesRequest { - // The project whose alert policies are to be listed. The format is - // - // projects/[PROJECT_ID] - // - // Note that this field names the parent container in which the alerting - // policies to be listed are stored. To retrieve a single alerting policy - // by name, use the - // [GetAlertPolicy][google.monitoring.v3.AlertPolicyService.GetAlertPolicy] - // operation, instead. - string name = 4; - - // If provided, this field specifies the criteria that must be met by - // alert policies to be included in the response. 
- // - // For more details, see [sorting and - // filtering](/monitoring/api/v3/sorting-and-filtering). - string filter = 5; - - // A comma-separated list of fields by which to sort the result. Supports - // the same set of field references as the `filter` field. Entries can be - // prefixed with a minus sign to sort by the field in descending order. - // - // For more details, see [sorting and - // filtering](/monitoring/api/v3/sorting-and-filtering). - string order_by = 6; - - // The maximum number of results to return in a single response. - int32 page_size = 2; - - // If this field is not empty then it must contain the `nextPageToken` value - // returned by a previous call to this method. Using this field causes the - // method to return more results from the previous method call. - string page_token = 3; -} - -// The protocol for the `ListAlertPolicies` response. -message ListAlertPoliciesResponse { - // The returned alert policies. - repeated AlertPolicy alert_policies = 3; - - // If there might be more results than were returned, then this field is set - // to a non-empty value. To see the additional results, - // use that value as `pageToken` in the next call to this method. - string next_page_token = 2; -} - -// The protocol for the `UpdateAlertPolicy` request. -message UpdateAlertPolicyRequest { - // Optional. A list of alerting policy field names. If this field is not - // empty, each listed field in the existing alerting policy is set to the - // value of the corresponding field in the supplied policy (`alert_policy`), - // or to the field's default value if the field is not in the supplied - // alerting policy. Fields not listed retain their previous value. - // - // Examples of valid field masks include `display_name`, `documentation`, - // `documentation.content`, `documentation.mime_type`, `user_labels`, - // `user_label.nameofkey`, `enabled`, `conditions`, `combiner`, etc. - // - // If this field is empty, then the supplied alerting policy replaces the - // existing policy. It is the same as deleting the existing policy and - // adding the supplied policy, except for the following: - // - // + The new policy will have the same `[ALERT_POLICY_ID]` as the former - // policy. This gives you continuity with the former policy in your - // notifications and incidents. - // + Conditions in the new policy will keep their former `[CONDITION_ID]` if - // the supplied condition includes the `name` field with that - // `[CONDITION_ID]`. If the supplied condition omits the `name` field, - // then a new `[CONDITION_ID]` is created. - google.protobuf.FieldMask update_mask = 2; - - // Required. The updated alerting policy or the updated values for the - // fields listed in `update_mask`. - // If `update_mask` is not empty, any fields in this policy that are - // not in `update_mask` are ignored. - AlertPolicy alert_policy = 3; -} - -// The protocol for the `DeleteAlertPolicy` request. -message DeleteAlertPolicyRequest { - // The alerting policy to delete. The format is: - // - // projects/[PROJECT_ID]/alertPolicies/[ALERT_POLICY_ID] - // - // For more information, see [AlertPolicy][google.monitoring.v3.AlertPolicy]. 
- string name = 3; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/alert_service_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/alert_service_pb2.py deleted file mode 100644 index 5f9be50453dd..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/alert_service_pb2.py +++ /dev/null @@ -1,684 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring_v3/proto/alert_service.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.monitoring_v3.proto import ( - alert_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/alert_service.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\021AlertServiceProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n4google/cloud/monitoring_v3/proto/alert_service.proto\x12\x14google.monitoring.v3\x1a\x1cgoogle/api/annotations.proto\x1a,google/cloud/monitoring_v3/proto/alert.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/api/client.proto"a\n\x18\x43reateAlertPolicyRequest\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x37\n\x0c\x61lert_policy\x18\x02 \x01(\x0b\x32!.google.monitoring.v3.AlertPolicy"%\n\x15GetAlertPolicyRequest\x12\x0c\n\x04name\x18\x03 \x01(\t"q\n\x18ListAlertPoliciesRequest\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12\x10\n\x08order_by\x18\x06 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"o\n\x19ListAlertPoliciesResponse\x12\x39\n\x0e\x61lert_policies\x18\x03 \x03(\x0b\x32!.google.monitoring.v3.AlertPolicy\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x84\x01\n\x18UpdateAlertPolicyRequest\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x37\n\x0c\x61lert_policy\x18\x03 \x01(\x0b\x32!.google.monitoring.v3.AlertPolicy"(\n\x18\x44\x65leteAlertPolicyRequest\x12\x0c\n\x04name\x18\x03 
\x01(\t2\xda\x07\n\x12\x41lertPolicyService\x12\xa1\x01\n\x11ListAlertPolicies\x12..google.monitoring.v3.ListAlertPoliciesRequest\x1a/.google.monitoring.v3.ListAlertPoliciesResponse"+\x82\xd3\xe4\x93\x02%\x12#/v3/{name=projects/*}/alertPolicies\x12\x8f\x01\n\x0eGetAlertPolicy\x12+.google.monitoring.v3.GetAlertPolicyRequest\x1a!.google.monitoring.v3.AlertPolicy"-\x82\xd3\xe4\x93\x02\'\x12%/v3/{name=projects/*/alertPolicies/*}\x12\xa1\x01\n\x11\x43reateAlertPolicy\x12..google.monitoring.v3.CreateAlertPolicyRequest\x1a!.google.monitoring.v3.AlertPolicy"9\x82\xd3\xe4\x93\x02\x33"#/v3/{name=projects/*}/alertPolicies:\x0c\x61lert_policy\x12\x8a\x01\n\x11\x44\x65leteAlertPolicy\x12..google.monitoring.v3.DeleteAlertPolicyRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v3/{name=projects/*/alertPolicies/*}\x12\xb0\x01\n\x11UpdateAlertPolicy\x12..google.monitoring.v3.UpdateAlertPolicyRequest\x1a!.google.monitoring.v3.AlertPolicy"H\x82\xd3\xe4\x93\x02\x42\x32\x32/v3/{alert_policy.name=projects/*/alertPolicies/*}:\x0c\x61lert_policy\x1a\xa9\x01\xca\x41\x19monitoring.googleapis.com\xd2\x41\x89\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.readB\xa9\x01\n\x18\x63om.google.monitoring.v3B\x11\x41lertServiceProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_CREATEALERTPOLICYREQUEST = _descriptor.Descriptor( - name="CreateAlertPolicyRequest", - full_name="google.monitoring.v3.CreateAlertPolicyRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.CreateAlertPolicyRequest.name", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="alert_policy", - full_name="google.monitoring.v3.CreateAlertPolicyRequest.alert_policy", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=242, - serialized_end=339, -) - - -_GETALERTPOLICYREQUEST = _descriptor.Descriptor( - name="GetAlertPolicyRequest", - full_name="google.monitoring.v3.GetAlertPolicyRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.GetAlertPolicyRequest.name", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=341, - serialized_end=378, -) - - -_LISTALERTPOLICIESREQUEST = _descriptor.Descriptor( - name="ListAlertPoliciesRequest", - full_name="google.monitoring.v3.ListAlertPoliciesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.ListAlertPoliciesRequest.name", - index=0, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.monitoring.v3.ListAlertPoliciesRequest.filter", - index=1, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="order_by", - full_name="google.monitoring.v3.ListAlertPoliciesRequest.order_by", - index=2, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.monitoring.v3.ListAlertPoliciesRequest.page_size", - index=3, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.monitoring.v3.ListAlertPoliciesRequest.page_token", - index=4, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=380, - serialized_end=493, -) - - -_LISTALERTPOLICIESRESPONSE = _descriptor.Descriptor( - name="ListAlertPoliciesResponse", - full_name="google.monitoring.v3.ListAlertPoliciesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="alert_policies", - full_name="google.monitoring.v3.ListAlertPoliciesResponse.alert_policies", - index=0, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.monitoring.v3.ListAlertPoliciesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=495, - serialized_end=606, -) - - -_UPDATEALERTPOLICYREQUEST = _descriptor.Descriptor( - name="UpdateAlertPolicyRequest", - full_name="google.monitoring.v3.UpdateAlertPolicyRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.monitoring.v3.UpdateAlertPolicyRequest.update_mask", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="alert_policy", - full_name="google.monitoring.v3.UpdateAlertPolicyRequest.alert_policy", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=609, - serialized_end=741, -) - - -_DELETEALERTPOLICYREQUEST = _descriptor.Descriptor( - name="DeleteAlertPolicyRequest", - full_name="google.monitoring.v3.DeleteAlertPolicyRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.DeleteAlertPolicyRequest.name", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=743, - serialized_end=783, -) - -_CREATEALERTPOLICYREQUEST.fields_by_name[ - "alert_policy" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2._ALERTPOLICY -) -_LISTALERTPOLICIESRESPONSE.fields_by_name[ - "alert_policies" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2._ALERTPOLICY -) -_UPDATEALERTPOLICYREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_UPDATEALERTPOLICYREQUEST.fields_by_name[ - "alert_policy" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2._ALERTPOLICY -) -DESCRIPTOR.message_types_by_name["CreateAlertPolicyRequest"] = _CREATEALERTPOLICYREQUEST -DESCRIPTOR.message_types_by_name["GetAlertPolicyRequest"] = _GETALERTPOLICYREQUEST -DESCRIPTOR.message_types_by_name["ListAlertPoliciesRequest"] = _LISTALERTPOLICIESREQUEST -DESCRIPTOR.message_types_by_name[ - "ListAlertPoliciesResponse" -] = _LISTALERTPOLICIESRESPONSE -DESCRIPTOR.message_types_by_name["UpdateAlertPolicyRequest"] = 
_UPDATEALERTPOLICYREQUEST -DESCRIPTOR.message_types_by_name["DeleteAlertPolicyRequest"] = _DELETEALERTPOLICYREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -CreateAlertPolicyRequest = _reflection.GeneratedProtocolMessageType( - "CreateAlertPolicyRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATEALERTPOLICYREQUEST, - __module__="google.cloud.monitoring_v3.proto.alert_service_pb2", - __doc__="""The protocol for the ``CreateAlertPolicy`` request. - - - Attributes: - name: - The project in which to create the alerting policy. The format - is ``projects/[PROJECT_ID]``. Note that this field names the - parent container in which the alerting policy will be written, - not the name of the created policy. The alerting policy that - is returned will have a name that contains a normalized - representation of this name as a prefix but adds a suffix of - the form ``/alertPolicies/[POLICY_ID]``, identifying the - policy in the container. - alert_policy: - The requested alerting policy. You should omit the ``name`` - field in this policy. The name will be returned in the new - policy, including a new [ALERT\_POLICY\_ID] value. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateAlertPolicyRequest) - ), -) -_sym_db.RegisterMessage(CreateAlertPolicyRequest) - -GetAlertPolicyRequest = _reflection.GeneratedProtocolMessageType( - "GetAlertPolicyRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETALERTPOLICYREQUEST, - __module__="google.cloud.monitoring_v3.proto.alert_service_pb2", - __doc__="""The protocol for the ``GetAlertPolicy`` request. - - - Attributes: - name: - The alerting policy to retrieve. The format is :: - projects/[PROJECT_ID]/alertPolicies/[ALERT_POLICY_ID] - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.GetAlertPolicyRequest) - ), -) -_sym_db.RegisterMessage(GetAlertPolicyRequest) - -ListAlertPoliciesRequest = _reflection.GeneratedProtocolMessageType( - "ListAlertPoliciesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTALERTPOLICIESREQUEST, - __module__="google.cloud.monitoring_v3.proto.alert_service_pb2", - __doc__="""The protocol for the ``ListAlertPolicies`` request. - - - Attributes: - name: - The project whose alert policies are to be listed. The format - is :: projects/[PROJECT_ID] Note that this field names - the parent container in which the alerting policies to be - listed are stored. To retrieve a single alerting policy by - name, use the [GetAlertPolicy][google.monitoring.v3.AlertPolic - yService.GetAlertPolicy] operation, instead. - filter: - If provided, this field specifies the criteria that must be - met by alert policies to be included in the response. For - more details, see `sorting and filtering - `__. - order_by: - A comma-separated list of fields by which to sort the result. - Supports the same set of field references as the ``filter`` - field. Entries can be prefixed with a minus sign to sort by - the field in descending order. For more details, see `sorting - and filtering `__. - page_size: - The maximum number of results to return in a single response. - page_token: - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return more - results from the previous method call. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListAlertPoliciesRequest) - ), -) -_sym_db.RegisterMessage(ListAlertPoliciesRequest) - -ListAlertPoliciesResponse = _reflection.GeneratedProtocolMessageType( - "ListAlertPoliciesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTALERTPOLICIESRESPONSE, - __module__="google.cloud.monitoring_v3.proto.alert_service_pb2", - __doc__="""The protocol for the ``ListAlertPolicies`` response. - - - Attributes: - alert_policies: - The returned alert policies. - next_page_token: - If there might be more results than were returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``pageToken`` in the next call to - this method. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListAlertPoliciesResponse) - ), -) -_sym_db.RegisterMessage(ListAlertPoliciesResponse) - -UpdateAlertPolicyRequest = _reflection.GeneratedProtocolMessageType( - "UpdateAlertPolicyRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATEALERTPOLICYREQUEST, - __module__="google.cloud.monitoring_v3.proto.alert_service_pb2", - __doc__="""The protocol for the ``UpdateAlertPolicy`` request. - - - Attributes: - update_mask: - Optional. A list of alerting policy field names. If this field - is not empty, each listed field in the existing alerting - policy is set to the value of the corresponding field in the - supplied policy (``alert_policy``), or to the field's default - value if the field is not in the supplied alerting policy. - Fields not listed retain their previous value. Examples of - valid field masks include ``display_name``, ``documentation``, - ``documentation.content``, ``documentation.mime_type``, - ``user_labels``, ``user_label.nameofkey``, ``enabled``, - ``conditions``, ``combiner``, etc. If this field is empty, - then the supplied alerting policy replaces the existing - policy. It is the same as deleting the existing policy and - adding the supplied policy, except for the following: - The - new policy will have the same ``[ALERT_POLICY_ID]`` as the - former policy. This gives you continuity with the former - policy in your notifications and incidents. - Conditions - in the new policy will keep their former ``[CONDITION_ID]`` - if the supplied condition includes the ``name`` field with - that ``[CONDITION_ID]``. If the supplied condition omits - the ``name`` field, then a new ``[CONDITION_ID]`` is created. - alert_policy: - Required. The updated alerting policy or the updated values - for the fields listed in ``update_mask``. If ``update_mask`` - is not empty, any fields in this policy that are not in - ``update_mask`` are ignored. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.UpdateAlertPolicyRequest) - ), -) -_sym_db.RegisterMessage(UpdateAlertPolicyRequest) - -DeleteAlertPolicyRequest = _reflection.GeneratedProtocolMessageType( - "DeleteAlertPolicyRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEALERTPOLICYREQUEST, - __module__="google.cloud.monitoring_v3.proto.alert_service_pb2", - __doc__="""The protocol for the ``DeleteAlertPolicy`` request. - - - Attributes: - name: - The alerting policy to delete. The format is: :: - projects/[PROJECT_ID]/alertPolicies/[ALERT_POLICY_ID] For - more information, see - [AlertPolicy][google.monitoring.v3.AlertPolicy]. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.DeleteAlertPolicyRequest) - ), -) -_sym_db.RegisterMessage(DeleteAlertPolicyRequest) - - -DESCRIPTOR._options = None - -_ALERTPOLICYSERVICE = _descriptor.ServiceDescriptor( - name="AlertPolicyService", - full_name="google.monitoring.v3.AlertPolicyService", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\031monitoring.googleapis.com\322A\211\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.read" - ), - serialized_start=786, - serialized_end=1772, - methods=[ - _descriptor.MethodDescriptor( - name="ListAlertPolicies", - full_name="google.monitoring.v3.AlertPolicyService.ListAlertPolicies", - index=0, - containing_service=None, - input_type=_LISTALERTPOLICIESREQUEST, - output_type=_LISTALERTPOLICIESRESPONSE, - serialized_options=_b( - "\202\323\344\223\002%\022#/v3/{name=projects/*}/alertPolicies" - ), - ), - _descriptor.MethodDescriptor( - name="GetAlertPolicy", - full_name="google.monitoring.v3.AlertPolicyService.GetAlertPolicy", - index=1, - containing_service=None, - input_type=_GETALERTPOLICYREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2._ALERTPOLICY, - serialized_options=_b( - "\202\323\344\223\002'\022%/v3/{name=projects/*/alertPolicies/*}" - ), - ), - _descriptor.MethodDescriptor( - name="CreateAlertPolicy", - full_name="google.monitoring.v3.AlertPolicyService.CreateAlertPolicy", - index=2, - containing_service=None, - input_type=_CREATEALERTPOLICYREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2._ALERTPOLICY, - serialized_options=_b( - '\202\323\344\223\0023"#/v3/{name=projects/*}/alertPolicies:\014alert_policy' - ), - ), - _descriptor.MethodDescriptor( - name="DeleteAlertPolicy", - full_name="google.monitoring.v3.AlertPolicyService.DeleteAlertPolicy", - index=3, - containing_service=None, - input_type=_DELETEALERTPOLICYREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002'*%/v3/{name=projects/*/alertPolicies/*}" - ), - ), - _descriptor.MethodDescriptor( - name="UpdateAlertPolicy", - full_name="google.monitoring.v3.AlertPolicyService.UpdateAlertPolicy", - index=4, - containing_service=None, - input_type=_UPDATEALERTPOLICYREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2._ALERTPOLICY, - serialized_options=_b( - "\202\323\344\223\002B22/v3/{alert_policy.name=projects/*/alertPolicies/*}:\014alert_policy" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_ALERTPOLICYSERVICE) - -DESCRIPTOR.services_by_name["AlertPolicyService"] = _ALERTPOLICYSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/alert_service_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/alert_service_pb2_grpc.py deleted file mode 100644 index d5773d78c4d4..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/alert_service_pb2_grpc.py +++ /dev/null @@ -1,140 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-import grpc - -from google.cloud.monitoring_v3.proto import ( - alert_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2, -) -from google.cloud.monitoring_v3.proto import ( - alert_service_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__service__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class AlertPolicyServiceStub(object): - """The AlertPolicyService API is used to manage (list, create, delete, - edit) alert policies in Stackdriver Monitoring. An alerting policy is - a description of the conditions under which some aspect of your - system is considered to be "unhealthy" and the ways to notify - people or services about this state. In addition to using this API, alert - policies can also be managed through - [Stackdriver Monitoring](https://cloud.google.com/monitoring/docs/), - which can be reached by clicking the "Monitoring" tab in - [Cloud Console](https://console.cloud.google.com/). - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.ListAlertPolicies = channel.unary_unary( - "/google.monitoring.v3.AlertPolicyService/ListAlertPolicies", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__service__pb2.ListAlertPoliciesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__service__pb2.ListAlertPoliciesResponse.FromString, - ) - self.GetAlertPolicy = channel.unary_unary( - "/google.monitoring.v3.AlertPolicyService/GetAlertPolicy", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__service__pb2.GetAlertPolicyRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2.AlertPolicy.FromString, - ) - self.CreateAlertPolicy = channel.unary_unary( - "/google.monitoring.v3.AlertPolicyService/CreateAlertPolicy", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__service__pb2.CreateAlertPolicyRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2.AlertPolicy.FromString, - ) - self.DeleteAlertPolicy = channel.unary_unary( - "/google.monitoring.v3.AlertPolicyService/DeleteAlertPolicy", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__service__pb2.DeleteAlertPolicyRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.UpdateAlertPolicy = channel.unary_unary( - "/google.monitoring.v3.AlertPolicyService/UpdateAlertPolicy", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__service__pb2.UpdateAlertPolicyRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2.AlertPolicy.FromString, - ) - - -class AlertPolicyServiceServicer(object): - """The AlertPolicyService API is used to manage (list, create, delete, - edit) alert policies in Stackdriver Monitoring. An alerting policy is - a description of the conditions under which some aspect of your - system is considered to be "unhealthy" and the ways to notify - people or services about this state. In addition to using this API, alert - policies can also be managed through - [Stackdriver Monitoring](https://cloud.google.com/monitoring/docs/), - which can be reached by clicking the "Monitoring" tab in - [Cloud Console](https://console.cloud.google.com/). 
- """ - - def ListAlertPolicies(self, request, context): - """Lists the existing alerting policies for the project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetAlertPolicy(self, request, context): - """Gets a single alerting policy. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateAlertPolicy(self, request, context): - """Creates a new alerting policy. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteAlertPolicy(self, request, context): - """Deletes an alerting policy. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateAlertPolicy(self, request, context): - """Updates an alerting policy. You can either replace the entire policy with - a new one or replace only certain fields in the current alerting policy by - specifying the fields to be updated via `updateMask`. Returns the - updated alerting policy. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_AlertPolicyServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "ListAlertPolicies": grpc.unary_unary_rpc_method_handler( - servicer.ListAlertPolicies, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__service__pb2.ListAlertPoliciesRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__service__pb2.ListAlertPoliciesResponse.SerializeToString, - ), - "GetAlertPolicy": grpc.unary_unary_rpc_method_handler( - servicer.GetAlertPolicy, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__service__pb2.GetAlertPolicyRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2.AlertPolicy.SerializeToString, - ), - "CreateAlertPolicy": grpc.unary_unary_rpc_method_handler( - servicer.CreateAlertPolicy, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__service__pb2.CreateAlertPolicyRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2.AlertPolicy.SerializeToString, - ), - "DeleteAlertPolicy": grpc.unary_unary_rpc_method_handler( - servicer.DeleteAlertPolicy, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__service__pb2.DeleteAlertPolicyRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "UpdateAlertPolicy": grpc.unary_unary_rpc_method_handler( - servicer.UpdateAlertPolicy, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__service__pb2.UpdateAlertPolicyRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2.AlertPolicy.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.monitoring.v3.AlertPolicyService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/monitoring/google/cloud/monitoring_v3/proto/common.proto 
b/monitoring/google/cloud/monitoring_v3/proto/common.proto deleted file mode 100644 index 1e89b0dcdaff..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/common.proto +++ /dev/null @@ -1,432 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.monitoring.v3; - -import "google/api/distribution.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "CommonProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// A single strongly-typed value. -message TypedValue { - // The typed value field. - oneof value { - // A Boolean value: `true` or `false`. - bool bool_value = 1; - - // A 64-bit integer. Its range is approximately ±9.2x1018. - int64 int64_value = 2; - - // A 64-bit double-precision floating-point number. Its magnitude - // is approximately ±10±300 and it has 16 - // significant digits of precision. - double double_value = 3; - - // A variable-length string value. - string string_value = 4; - - // A distribution value. - google.api.Distribution distribution_value = 5; - } -} - -// A closed time interval. It extends from the start time to the end time, and includes both: `[startTime, endTime]`. Valid time intervals depend on the [`MetricKind`](/monitoring/api/ref_v3/rest/v3/projects.metricDescriptors#MetricKind) of the metric value. In no case can the end time be earlier than the start time. -// -// * For a `GAUGE` metric, the `startTime` value is technically optional; if -// no value is specified, the start time defaults to the value of the -// end time, and the interval represents a single point in time. If both -// start and end times are specified, they must be identical. Such an -// interval is valid only for `GAUGE` metrics, which are point-in-time -// measurements. -// -// * For `DELTA` and `CUMULATIVE` metrics, the start time must be earlier -// than the end time. -// -// * In all cases, the start time of the next interval must be -// at least a microsecond after the end time of the previous interval. -// Because the interval is closed, if the start time of a new interval -// is the same as the end time of the previous interval, data written -// at the new start time could overwrite data written at the previous -// end time. -message TimeInterval { - // Required. The end of the time interval. - google.protobuf.Timestamp end_time = 2; - - // Optional. The beginning of the time interval. The default value - // for the start time is the end time. The start time must not be - // later than the end time. - google.protobuf.Timestamp start_time = 1; -} - -// Describes how to combine multiple time series to provide different views of -// the data. 
Aggregation consists of an alignment step on individual time -// series (`alignment_period` and `per_series_aligner`) followed by an optional -// reduction step of the data across the aligned time series -// (`cross_series_reducer` and `group_by_fields`). For more details, see -// [Aggregation](/monitoring/api/learn_more#aggregation). -message Aggregation { - // The Aligner describes how to bring the data points in a single - // time series into temporal alignment. - enum Aligner { - // No alignment. Raw data is returned. Not valid if cross-time - // series reduction is requested. The value type of the result is - // the same as the value type of the input. - ALIGN_NONE = 0; - - // Align and convert to delta metric type. This alignment is valid - // for cumulative metrics and delta metrics. Aligning an existing - // delta metric to a delta metric requires that the alignment - // period be increased. The value type of the result is the same - // as the value type of the input. - // - // One can think of this aligner as a rate but without time units; that - // is, the output is conceptually (second_point - first_point). - ALIGN_DELTA = 1; - - // Align and convert to a rate. This alignment is valid for - // cumulative metrics and delta metrics with numeric values. The output is a - // gauge metric with value type - // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. - // - // One can think of this aligner as conceptually providing the slope of - // the line that passes through the value at the start and end of the - // window. In other words, this is conceptually ((y1 - y0)/(t1 - t0)), - // and the output unit is one that has a "/time" dimension. - // - // If, by rate, you are looking for percentage change, see the - // `ALIGN_PERCENT_CHANGE` aligner option. - ALIGN_RATE = 2; - - // Align by interpolating between adjacent points around the - // period boundary. This alignment is valid for gauge - // metrics with numeric values. The value type of the result is the same - // as the value type of the input. - ALIGN_INTERPOLATE = 3; - - // Align by shifting the oldest data point before the period - // boundary to the boundary. This alignment is valid for gauge - // metrics. The value type of the result is the same as the - // value type of the input. - ALIGN_NEXT_OLDER = 4; - - // Align time series via aggregation. The resulting data point in - // the alignment period is the minimum of all data points in the - // period. This alignment is valid for gauge and delta metrics with numeric - // values. The value type of the result is the same as the value - // type of the input. - ALIGN_MIN = 10; - - // Align time series via aggregation. The resulting data point in - // the alignment period is the maximum of all data points in the - // period. This alignment is valid for gauge and delta metrics with numeric - // values. The value type of the result is the same as the value - // type of the input. - ALIGN_MAX = 11; - - // Align time series via aggregation. The resulting data point in - // the alignment period is the average or arithmetic mean of all - // data points in the period. This alignment is valid for gauge and delta - // metrics with numeric values. The value type of the output is - // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. - ALIGN_MEAN = 12; - - // Align time series via aggregation. The resulting data point in - // the alignment period is the count of all data points in the - // period. 
This alignment is valid for gauge and delta metrics with numeric - // or Boolean values. The value type of the output is - // [INT64][google.api.MetricDescriptor.ValueType.INT64]. - ALIGN_COUNT = 13; - - // Align time series via aggregation. The resulting data point in - // the alignment period is the sum of all data points in the - // period. This alignment is valid for gauge and delta metrics with numeric - // and distribution values. The value type of the output is the - // same as the value type of the input. - ALIGN_SUM = 14; - - // Align time series via aggregation. The resulting data point in - // the alignment period is the standard deviation of all data - // points in the period. This alignment is valid for gauge and delta metrics - // with numeric values. The value type of the output is - // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. - ALIGN_STDDEV = 15; - - // Align time series via aggregation. The resulting data point in - // the alignment period is the count of True-valued data points in the - // period. This alignment is valid for gauge metrics with - // Boolean values. The value type of the output is - // [INT64][google.api.MetricDescriptor.ValueType.INT64]. - ALIGN_COUNT_TRUE = 16; - - // Align time series via aggregation. The resulting data point in - // the alignment period is the count of False-valued data points in the - // period. This alignment is valid for gauge metrics with - // Boolean values. The value type of the output is - // [INT64][google.api.MetricDescriptor.ValueType.INT64]. - ALIGN_COUNT_FALSE = 24; - - // Align time series via aggregation. The resulting data point in - // the alignment period is the fraction of True-valued data points in the - // period. This alignment is valid for gauge metrics with Boolean values. - // The output value is in the range [0, 1] and has value type - // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. - ALIGN_FRACTION_TRUE = 17; - - // Align time series via aggregation. The resulting data point in - // the alignment period is the 99th percentile of all data - // points in the period. This alignment is valid for gauge and delta metrics - // with distribution values. The output is a gauge metric with value type - // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. - ALIGN_PERCENTILE_99 = 18; - - // Align time series via aggregation. The resulting data point in - // the alignment period is the 95th percentile of all data - // points in the period. This alignment is valid for gauge and delta metrics - // with distribution values. The output is a gauge metric with value type - // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. - ALIGN_PERCENTILE_95 = 19; - - // Align time series via aggregation. The resulting data point in - // the alignment period is the 50th percentile of all data - // points in the period. This alignment is valid for gauge and delta metrics - // with distribution values. The output is a gauge metric with value type - // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. - ALIGN_PERCENTILE_50 = 20; - - // Align time series via aggregation. The resulting data point in - // the alignment period is the 5th percentile of all data - // points in the period. This alignment is valid for gauge and delta metrics - // with distribution values. The output is a gauge metric with value type - // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. - ALIGN_PERCENTILE_05 = 21; - - // Align and convert to a percentage change. 
This alignment is valid for - // gauge and delta metrics with numeric values. This alignment conceptually - // computes the equivalent of "((current - previous)/previous)*100" - // where previous value is determined based on the alignmentPeriod. - // In the event that previous is 0 the calculated value is infinity with the - // exception that if both (current - previous) and previous are 0 the - // calculated value is 0. - // A 10 minute moving mean is computed at each point of the time window - // prior to the above calculation to smooth the metric and prevent false - // positives from very short lived spikes. - // Only applicable for data that is >= 0. Any values < 0 are treated as - // no data. While delta metrics are accepted by this alignment special care - // should be taken that the values for the metric will always be positive. - // The output is a gauge metric with value type - // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. - ALIGN_PERCENT_CHANGE = 23; - } - - // A Reducer describes how to aggregate data points from multiple - // time series into a single time series. - enum Reducer { - // No cross-time series reduction. The output of the aligner is - // returned. - REDUCE_NONE = 0; - - // Reduce by computing the mean across time series for each - // alignment period. This reducer is valid for delta and - // gauge metrics with numeric or distribution values. The value type of the - // output is [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. - REDUCE_MEAN = 1; - - // Reduce by computing the minimum across time series for each - // alignment period. This reducer is valid for delta and - // gauge metrics with numeric values. The value type of the output - // is the same as the value type of the input. - REDUCE_MIN = 2; - - // Reduce by computing the maximum across time series for each - // alignment period. This reducer is valid for delta and - // gauge metrics with numeric values. The value type of the output - // is the same as the value type of the input. - REDUCE_MAX = 3; - - // Reduce by computing the sum across time series for each - // alignment period. This reducer is valid for delta and - // gauge metrics with numeric and distribution values. The value type of - // the output is the same as the value type of the input. - REDUCE_SUM = 4; - - // Reduce by computing the standard deviation across time series - // for each alignment period. This reducer is valid for delta - // and gauge metrics with numeric or distribution values. The value type of - // the output is [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. - REDUCE_STDDEV = 5; - - // Reduce by computing the count of data points across time series - // for each alignment period. This reducer is valid for delta - // and gauge metrics of numeric, Boolean, distribution, and string value - // type. The value type of the output is - // [INT64][google.api.MetricDescriptor.ValueType.INT64]. - REDUCE_COUNT = 6; - - // Reduce by computing the count of True-valued data points across time - // series for each alignment period. This reducer is valid for delta - // and gauge metrics of Boolean value type. The value type of - // the output is [INT64][google.api.MetricDescriptor.ValueType.INT64]. - REDUCE_COUNT_TRUE = 7; - - // Reduce by computing the count of False-valued data points across time - // series for each alignment period. This reducer is valid for delta - // and gauge metrics of Boolean value type. The value type of - // the output is [INT64][google.api.MetricDescriptor.ValueType.INT64]. 
- REDUCE_COUNT_FALSE = 15; - - // Reduce by computing the fraction of True-valued data points across time - // series for each alignment period. This reducer is valid for delta - // and gauge metrics of Boolean value type. The output value is in the - // range [0, 1] and has value type - // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. - REDUCE_FRACTION_TRUE = 8; - - // Reduce by computing 99th percentile of data points across time series - // for each alignment period. This reducer is valid for gauge and delta - // metrics of numeric and distribution type. The value of the output is - // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE] - REDUCE_PERCENTILE_99 = 9; - - // Reduce by computing 95th percentile of data points across time series - // for each alignment period. This reducer is valid for gauge and delta - // metrics of numeric and distribution type. The value of the output is - // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE] - REDUCE_PERCENTILE_95 = 10; - - // Reduce by computing 50th percentile of data points across time series - // for each alignment period. This reducer is valid for gauge and delta - // metrics of numeric and distribution type. The value of the output is - // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE] - REDUCE_PERCENTILE_50 = 11; - - // Reduce by computing 5th percentile of data points across time series - // for each alignment period. This reducer is valid for gauge and delta - // metrics of numeric and distribution type. The value of the output is - // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE] - REDUCE_PERCENTILE_05 = 12; - } - - // The alignment period for per-[time series][google.monitoring.v3.TimeSeries] - // alignment. If present, `alignmentPeriod` must be at least 60 - // seconds. After per-time series alignment, each time series will - // contain data points only on the period boundaries. If - // `perSeriesAligner` is not specified or equals `ALIGN_NONE`, then - // this field is ignored. If `perSeriesAligner` is specified and - // does not equal `ALIGN_NONE`, then this field must be defined; - // otherwise an error is returned. - google.protobuf.Duration alignment_period = 1; - - // The approach to be used to align individual time series. Not all - // alignment functions may be applied to all time series, depending - // on the metric type and value type of the original time - // series. Alignment may change the metric type or the value type of - // the time series. - // - // Time series data must be aligned in order to perform cross-time - // series reduction. If `crossSeriesReducer` is specified, then - // `perSeriesAligner` must be specified and not equal `ALIGN_NONE` - // and `alignmentPeriod` must be specified; otherwise, an error is - // returned. - Aligner per_series_aligner = 2; - - // The approach to be used to combine time series. Not all reducer - // functions may be applied to all time series, depending on the - // metric type and the value type of the original time - // series. Reduction may change the metric type of value type of the - // time series. - // - // Time series data must be aligned in order to perform cross-time - // series reduction. If `crossSeriesReducer` is specified, then - // `perSeriesAligner` must be specified and not equal `ALIGN_NONE` - // and `alignmentPeriod` must be specified; otherwise, an error is - // returned. - Reducer cross_series_reducer = 4; - - // The set of fields to preserve when `crossSeriesReducer` is - // specified. 
The `groupByFields` determine how the time series are - // partitioned into subsets prior to applying the aggregation - // function. Each subset contains time series that have the same - // value for each of the grouping fields. Each individual time - // series is a member of exactly one subset. The - // `crossSeriesReducer` is applied to each subset of time series. - // It is not possible to reduce across different resource types, so - // this field implicitly contains `resource.type`. Fields not - // specified in `groupByFields` are aggregated away. If - // `groupByFields` is not specified and all the time series have - // the same resource type, then the time series are aggregated into - // a single output time series. If `crossSeriesReducer` is not - // defined, this field is ignored. - repeated string group_by_fields = 5; -} - -// Specifies an ordering relationship on two arguments, here called left and -// right. -enum ComparisonType { - // No ordering relationship is specified. - COMPARISON_UNSPECIFIED = 0; - - // The left argument is greater than the right argument. - COMPARISON_GT = 1; - - // The left argument is greater than or equal to the right argument. - COMPARISON_GE = 2; - - // The left argument is less than the right argument. - COMPARISON_LT = 3; - - // The left argument is less than or equal to the right argument. - COMPARISON_LE = 4; - - // The left argument is equal to the right argument. - COMPARISON_EQ = 5; - - // The left argument is not equal to the right argument. - COMPARISON_NE = 6; -} - -// The tier of service for a Workspace. Please see the -// [service tiers -// documentation](https://cloud.google.com/monitoring/workspaces/tiers) for more -// details. -enum ServiceTier { - option deprecated = true; - - // An invalid sentinel value, used to indicate that a tier has not - // been provided explicitly. - SERVICE_TIER_UNSPECIFIED = 0; - - // The Stackdriver Basic tier, a free tier of service that provides basic - // features, a moderate allotment of logs, and access to built-in metrics. - // A number of features are not available in this tier. For more details, - // see [the service tiers - // documentation](https://cloud.google.com/monitoring/workspaces/tiers). - SERVICE_TIER_BASIC = 1; - - // The Stackdriver Premium tier, a higher, more expensive tier of service - // that provides access to all Stackdriver features, lets you use Stackdriver - // with AWS accounts, and has a larger allotments for logs and metrics. For - // more details, see [the service tiers - // documentation](https://cloud.google.com/monitoring/workspaces/tiers). - SERVICE_TIER_PREMIUM = 2; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/common_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/common_pb2.py deleted file mode 100644 index 51585dc065da..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/common_pb2.py +++ /dev/null @@ -1,792 +0,0 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring_v3/proto/common.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import distribution_pb2 as google_dot_api_dot_distribution__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/common.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\013CommonProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n-google/cloud/monitoring_v3/proto/common.proto\x12\x14google.monitoring.v3\x1a\x1dgoogle/api/distribution.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xaa\x01\n\nTypedValue\x12\x14\n\nbool_value\x18\x01 \x01(\x08H\x00\x12\x15\n\x0bint64_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x04 \x01(\tH\x00\x12\x36\n\x12\x64istribution_value\x18\x05 \x01(\x0b\x32\x18.google.api.DistributionH\x00\x42\x07\n\x05value"l\n\x0cTimeInterval\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xad\x07\n\x0b\x41ggregation\x12\x33\n\x10\x61lignment_period\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x45\n\x12per_series_aligner\x18\x02 \x01(\x0e\x32).google.monitoring.v3.Aggregation.Aligner\x12G\n\x14\x63ross_series_reducer\x18\x04 \x01(\x0e\x32).google.monitoring.v3.Aggregation.Reducer\x12\x17\n\x0fgroup_by_fields\x18\x05 
\x03(\t"\x8b\x03\n\x07\x41ligner\x12\x0e\n\nALIGN_NONE\x10\x00\x12\x0f\n\x0b\x41LIGN_DELTA\x10\x01\x12\x0e\n\nALIGN_RATE\x10\x02\x12\x15\n\x11\x41LIGN_INTERPOLATE\x10\x03\x12\x14\n\x10\x41LIGN_NEXT_OLDER\x10\x04\x12\r\n\tALIGN_MIN\x10\n\x12\r\n\tALIGN_MAX\x10\x0b\x12\x0e\n\nALIGN_MEAN\x10\x0c\x12\x0f\n\x0b\x41LIGN_COUNT\x10\r\x12\r\n\tALIGN_SUM\x10\x0e\x12\x10\n\x0c\x41LIGN_STDDEV\x10\x0f\x12\x14\n\x10\x41LIGN_COUNT_TRUE\x10\x10\x12\x15\n\x11\x41LIGN_COUNT_FALSE\x10\x18\x12\x17\n\x13\x41LIGN_FRACTION_TRUE\x10\x11\x12\x17\n\x13\x41LIGN_PERCENTILE_99\x10\x12\x12\x17\n\x13\x41LIGN_PERCENTILE_95\x10\x13\x12\x17\n\x13\x41LIGN_PERCENTILE_50\x10\x14\x12\x17\n\x13\x41LIGN_PERCENTILE_05\x10\x15\x12\x18\n\x14\x41LIGN_PERCENT_CHANGE\x10\x17"\xb1\x02\n\x07Reducer\x12\x0f\n\x0bREDUCE_NONE\x10\x00\x12\x0f\n\x0bREDUCE_MEAN\x10\x01\x12\x0e\n\nREDUCE_MIN\x10\x02\x12\x0e\n\nREDUCE_MAX\x10\x03\x12\x0e\n\nREDUCE_SUM\x10\x04\x12\x11\n\rREDUCE_STDDEV\x10\x05\x12\x10\n\x0cREDUCE_COUNT\x10\x06\x12\x15\n\x11REDUCE_COUNT_TRUE\x10\x07\x12\x16\n\x12REDUCE_COUNT_FALSE\x10\x0f\x12\x18\n\x14REDUCE_FRACTION_TRUE\x10\x08\x12\x18\n\x14REDUCE_PERCENTILE_99\x10\t\x12\x18\n\x14REDUCE_PERCENTILE_95\x10\n\x12\x18\n\x14REDUCE_PERCENTILE_50\x10\x0b\x12\x18\n\x14REDUCE_PERCENTILE_05\x10\x0c*\x9e\x01\n\x0e\x43omparisonType\x12\x1a\n\x16\x43OMPARISON_UNSPECIFIED\x10\x00\x12\x11\n\rCOMPARISON_GT\x10\x01\x12\x11\n\rCOMPARISON_GE\x10\x02\x12\x11\n\rCOMPARISON_LT\x10\x03\x12\x11\n\rCOMPARISON_LE\x10\x04\x12\x11\n\rCOMPARISON_EQ\x10\x05\x12\x11\n\rCOMPARISON_NE\x10\x06*a\n\x0bServiceTier\x12\x1c\n\x18SERVICE_TIER_UNSPECIFIED\x10\x00\x12\x16\n\x12SERVICE_TIER_BASIC\x10\x01\x12\x18\n\x14SERVICE_TIER_PREMIUM\x10\x02\x1a\x02\x18\x01\x42\xa3\x01\n\x18\x63om.google.monitoring.v3B\x0b\x43ommonProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_distribution__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - -_COMPARISONTYPE = _descriptor.EnumDescriptor( - name="ComparisonType", - full_name="google.monitoring.v3.ComparisonType", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="COMPARISON_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="COMPARISON_GT", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="COMPARISON_GE", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="COMPARISON_LT", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="COMPARISON_LE", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="COMPARISON_EQ", index=5, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="COMPARISON_NE", index=6, number=6, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1395, - serialized_end=1553, -) -_sym_db.RegisterEnumDescriptor(_COMPARISONTYPE) - -ComparisonType = enum_type_wrapper.EnumTypeWrapper(_COMPARISONTYPE) -_SERVICETIER = _descriptor.EnumDescriptor( - name="ServiceTier", - full_name="google.monitoring.v3.ServiceTier", - filename=None, - file=DESCRIPTOR, - values=[ - 
_descriptor.EnumValueDescriptor( - name="SERVICE_TIER_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="SERVICE_TIER_BASIC", - index=1, - number=1, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="SERVICE_TIER_PREMIUM", - index=2, - number=2, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=_b("\030\001"), - serialized_start=1555, - serialized_end=1652, -) -_sym_db.RegisterEnumDescriptor(_SERVICETIER) - -ServiceTier = enum_type_wrapper.EnumTypeWrapper(_SERVICETIER) -COMPARISON_UNSPECIFIED = 0 -COMPARISON_GT = 1 -COMPARISON_GE = 2 -COMPARISON_LT = 3 -COMPARISON_LE = 4 -COMPARISON_EQ = 5 -COMPARISON_NE = 6 -SERVICE_TIER_UNSPECIFIED = 0 -SERVICE_TIER_BASIC = 1 -SERVICE_TIER_PREMIUM = 2 - - -_AGGREGATION_ALIGNER = _descriptor.EnumDescriptor( - name="Aligner", - full_name="google.monitoring.v3.Aggregation.Aligner", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="ALIGN_NONE", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_DELTA", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_RATE", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_INTERPOLATE", - index=3, - number=3, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_NEXT_OLDER", - index=4, - number=4, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_MIN", index=5, number=10, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_MAX", index=6, number=11, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_MEAN", index=7, number=12, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_COUNT", index=8, number=13, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_SUM", index=9, number=14, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_STDDEV", index=10, number=15, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_COUNT_TRUE", - index=11, - number=16, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_COUNT_FALSE", - index=12, - number=24, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_FRACTION_TRUE", - index=13, - number=17, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_PERCENTILE_99", - index=14, - number=18, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_PERCENTILE_95", - index=15, - number=19, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_PERCENTILE_50", - index=16, - number=20, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_PERCENTILE_05", - index=17, - number=21, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_PERCENT_CHANGE", - index=18, - number=23, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=689, - serialized_end=1084, -) 
-_sym_db.RegisterEnumDescriptor(_AGGREGATION_ALIGNER) - -_AGGREGATION_REDUCER = _descriptor.EnumDescriptor( - name="Reducer", - full_name="google.monitoring.v3.Aggregation.Reducer", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="REDUCE_NONE", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_MEAN", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_MIN", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_MAX", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_SUM", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_STDDEV", index=5, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_COUNT", index=6, number=6, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_COUNT_TRUE", - index=7, - number=7, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_COUNT_FALSE", - index=8, - number=15, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_FRACTION_TRUE", - index=9, - number=8, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_PERCENTILE_99", - index=10, - number=9, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_PERCENTILE_95", - index=11, - number=10, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_PERCENTILE_50", - index=12, - number=11, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_PERCENTILE_05", - index=13, - number=12, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1087, - serialized_end=1392, -) -_sym_db.RegisterEnumDescriptor(_AGGREGATION_REDUCER) - - -_TYPEDVALUE = _descriptor.Descriptor( - name="TypedValue", - full_name="google.monitoring.v3.TypedValue", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="bool_value", - full_name="google.monitoring.v3.TypedValue.bool_value", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="int64_value", - full_name="google.monitoring.v3.TypedValue.int64_value", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="double_value", - full_name="google.monitoring.v3.TypedValue.double_value", - index=2, - number=3, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="string_value", - 
full_name="google.monitoring.v3.TypedValue.string_value", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="distribution_value", - full_name="google.monitoring.v3.TypedValue.distribution_value", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="value", - full_name="google.monitoring.v3.TypedValue.value", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=168, - serialized_end=338, -) - - -_TIMEINTERVAL = _descriptor.Descriptor( - name="TimeInterval", - full_name="google.monitoring.v3.TimeInterval", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.monitoring.v3.TimeInterval.end_time", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.monitoring.v3.TimeInterval.start_time", - index=1, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=340, - serialized_end=448, -) - - -_AGGREGATION = _descriptor.Descriptor( - name="Aggregation", - full_name="google.monitoring.v3.Aggregation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="alignment_period", - full_name="google.monitoring.v3.Aggregation.alignment_period", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="per_series_aligner", - full_name="google.monitoring.v3.Aggregation.per_series_aligner", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cross_series_reducer", - full_name="google.monitoring.v3.Aggregation.cross_series_reducer", - index=2, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="group_by_fields", - full_name="google.monitoring.v3.Aggregation.group_by_fields", - index=3, - number=5, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_AGGREGATION_ALIGNER, _AGGREGATION_REDUCER], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=451, - serialized_end=1392, -) - -_TYPEDVALUE.fields_by_name[ - "distribution_value" -].message_type = google_dot_api_dot_distribution__pb2._DISTRIBUTION -_TYPEDVALUE.oneofs_by_name["value"].fields.append( - _TYPEDVALUE.fields_by_name["bool_value"] -) -_TYPEDVALUE.fields_by_name["bool_value"].containing_oneof = _TYPEDVALUE.oneofs_by_name[ - "value" -] -_TYPEDVALUE.oneofs_by_name["value"].fields.append( - _TYPEDVALUE.fields_by_name["int64_value"] -) -_TYPEDVALUE.fields_by_name["int64_value"].containing_oneof = _TYPEDVALUE.oneofs_by_name[ - "value" -] -_TYPEDVALUE.oneofs_by_name["value"].fields.append( - _TYPEDVALUE.fields_by_name["double_value"] -) -_TYPEDVALUE.fields_by_name[ - "double_value" -].containing_oneof = _TYPEDVALUE.oneofs_by_name["value"] -_TYPEDVALUE.oneofs_by_name["value"].fields.append( - _TYPEDVALUE.fields_by_name["string_value"] -) -_TYPEDVALUE.fields_by_name[ - "string_value" -].containing_oneof = _TYPEDVALUE.oneofs_by_name["value"] -_TYPEDVALUE.oneofs_by_name["value"].fields.append( - _TYPEDVALUE.fields_by_name["distribution_value"] -) -_TYPEDVALUE.fields_by_name[ - "distribution_value" -].containing_oneof = _TYPEDVALUE.oneofs_by_name["value"] -_TIMEINTERVAL.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TIMEINTERVAL.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_AGGREGATION.fields_by_name[ - "alignment_period" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_AGGREGATION.fields_by_name["per_series_aligner"].enum_type = _AGGREGATION_ALIGNER -_AGGREGATION.fields_by_name["cross_series_reducer"].enum_type = _AGGREGATION_REDUCER -_AGGREGATION_ALIGNER.containing_type = _AGGREGATION -_AGGREGATION_REDUCER.containing_type = _AGGREGATION -DESCRIPTOR.message_types_by_name["TypedValue"] = _TYPEDVALUE -DESCRIPTOR.message_types_by_name["TimeInterval"] = _TIMEINTERVAL -DESCRIPTOR.message_types_by_name["Aggregation"] = _AGGREGATION -DESCRIPTOR.enum_types_by_name["ComparisonType"] = _COMPARISONTYPE -DESCRIPTOR.enum_types_by_name["ServiceTier"] = _SERVICETIER -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -TypedValue = _reflection.GeneratedProtocolMessageType( - "TypedValue", - (_message.Message,), - dict( - DESCRIPTOR=_TYPEDVALUE, - __module__="google.cloud.monitoring_v3.proto.common_pb2", - __doc__="""A single strongly-typed value. - - - Attributes: - value: - The typed value field. - bool_value: - A Boolean value: ``true`` or ``false``. - int64_value: - A 64-bit integer. Its range is approximately ±9.2x1018. - double_value: - A 64-bit double-precision floating-point number. Its magnitude - is approximately ±10±300 and it has 16 significant digits of - precision. - string_value: - A variable-length string value. - distribution_value: - A distribution value. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.TypedValue) - ), -) -_sym_db.RegisterMessage(TypedValue) - -TimeInterval = _reflection.GeneratedProtocolMessageType( - "TimeInterval", - (_message.Message,), - dict( - DESCRIPTOR=_TIMEINTERVAL, - __module__="google.cloud.monitoring_v3.proto.common_pb2", - __doc__="""A closed time interval. It extends from the start time to - the end time, and includes both: ``[startTime, endTime]``. Valid time - intervals depend on the - ```MetricKind`` `__ - of the metric value. In no case can the end time be earlier than the - start time. - - - For a ``GAUGE`` metric, the ``startTime`` value is technically - optional; if no value is specified, the start time defaults to the - value of the end time, and the interval represents a single point in - time. If both start and end times are specified, they must be - identical. Such an interval is valid only for ``GAUGE`` metrics, - which are point-in-time measurements. - - - For ``DELTA`` and ``CUMULATIVE`` metrics, the start time must be - earlier than the end time. - - - In all cases, the start time of the next interval must be at least a - microsecond after the end time of the previous interval. Because the - interval is closed, if the start time of a new interval is the same - as the end time of the previous interval, data written at the new - start time could overwrite data written at the previous end time. - - - Attributes: - end_time: - Required. The end of the time interval. - start_time: - Optional. The beginning of the time interval. The default - value for the start time is the end time. The start time must - not be later than the end time. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.TimeInterval) - ), -) -_sym_db.RegisterMessage(TimeInterval) - -Aggregation = _reflection.GeneratedProtocolMessageType( - "Aggregation", - (_message.Message,), - dict( - DESCRIPTOR=_AGGREGATION, - __module__="google.cloud.monitoring_v3.proto.common_pb2", - __doc__="""Describes how to combine multiple time series to provide - different views of the data. Aggregation consists of an alignment step - on individual time series (``alignment_period`` and - ``per_series_aligner``) followed by an optional reduction step of the - data across the aligned time series (``cross_series_reducer`` and - ``group_by_fields``). For more details, see - `Aggregation `__. - - - Attributes: - alignment_period: - The alignment period for per-[time - series][google.monitoring.v3.TimeSeries] alignment. If - present, ``alignmentPeriod`` must be at least 60 seconds. - After per-time series alignment, each time series will contain - data points only on the period boundaries. If - ``perSeriesAligner`` is not specified or equals - ``ALIGN_NONE``, then this field is ignored. If - ``perSeriesAligner`` is specified and does not equal - ``ALIGN_NONE``, then this field must be defined; otherwise an - error is returned. - per_series_aligner: - The approach to be used to align individual time series. Not - all alignment functions may be applied to all time series, - depending on the metric type and value type of the original - time series. Alignment may change the metric type or the value - type of the time series. Time series data must be aligned in - order to perform cross-time series reduction. If - ``crossSeriesReducer`` is specified, then ``perSeriesAligner`` - must be specified and not equal ``ALIGN_NONE`` and - ``alignmentPeriod`` must be specified; otherwise, an error is - returned. 
- cross_series_reducer: - The approach to be used to combine time series. Not all - reducer functions may be applied to all time series, depending - on the metric type and the value type of the original time - series. Reduction may change the metric type of value type of - the time series. Time series data must be aligned in order to - perform cross-time series reduction. If ``crossSeriesReducer`` - is specified, then ``perSeriesAligner`` must be specified and - not equal ``ALIGN_NONE`` and ``alignmentPeriod`` must be - specified; otherwise, an error is returned. - group_by_fields: - The set of fields to preserve when ``crossSeriesReducer`` is - specified. The ``groupByFields`` determine how the time series - are partitioned into subsets prior to applying the aggregation - function. Each subset contains time series that have the same - value for each of the grouping fields. Each individual time - series is a member of exactly one subset. The - ``crossSeriesReducer`` is applied to each subset of time - series. It is not possible to reduce across different resource - types, so this field implicitly contains ``resource.type``. - Fields not specified in ``groupByFields`` are aggregated away. - If ``groupByFields`` is not specified and all the time series - have the same resource type, then the time series are - aggregated into a single output time series. If - ``crossSeriesReducer`` is not defined, this field is ignored. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.Aggregation) - ), -) -_sym_db.RegisterMessage(Aggregation) - - -DESCRIPTOR._options = None -_SERVICETIER._options = None -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/common_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/common_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/common_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/monitoring/google/cloud/monitoring_v3/proto/dropped_labels.proto b/monitoring/google/cloud/monitoring_v3/proto/dropped_labels.proto deleted file mode 100644 index 9b943ccd0475..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/dropped_labels.proto +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.monitoring.v3; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "DroppedLabelsProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// A set of (label, value) pairs which were dropped during aggregation, attached -// to google.api.Distribution.Exemplars in google.api.Distribution values during -// aggregation. 
-// -// These values are used in combination with the label values that remain on the -// aggregated Distribution timeseries to construct the full label set for the -// exemplar values. The resulting full label set may be used to identify the -// specific task/job/instance (for example) which may be contributing to a -// long-tail, while allowing the storage savings of only storing aggregated -// distribution values for a large group. -// -// Note that there are no guarantees on ordering of the labels from -// exemplar-to-exemplar and from distribution-to-distribution in the same -// stream, and there may be duplicates. It is up to clients to resolve any -// ambiguities. -message DroppedLabels { - // Map from label to its value, for all labels dropped in any aggregation. - map label = 1; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/dropped_labels_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/dropped_labels_pb2.py deleted file mode 100644 index c4df545c33af..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/dropped_labels_pb2.py +++ /dev/null @@ -1,176 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring_v3/proto/dropped_labels.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/dropped_labels.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\022DroppedLabelsProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n5google/cloud/monitoring_v3/proto/dropped_labels.proto\x12\x14google.monitoring.v3"|\n\rDroppedLabels\x12=\n\x05label\x18\x01 \x03(\x0b\x32..google.monitoring.v3.DroppedLabels.LabelEntry\x1a,\n\nLabelEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\xaa\x01\n\x18\x63om.google.monitoring.v3B\x12\x44roppedLabelsProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), -) - - -_DROPPEDLABELS_LABELENTRY = _descriptor.Descriptor( - name="LabelEntry", - full_name="google.monitoring.v3.DroppedLabels.LabelEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.monitoring.v3.DroppedLabels.LabelEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.monitoring.v3.DroppedLabels.LabelEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=159, - serialized_end=203, -) - -_DROPPEDLABELS = _descriptor.Descriptor( - name="DroppedLabels", - full_name="google.monitoring.v3.DroppedLabels", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="label", - full_name="google.monitoring.v3.DroppedLabels.label", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[_DROPPEDLABELS_LABELENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=79, - serialized_end=203, -) - -_DROPPEDLABELS_LABELENTRY.containing_type = _DROPPEDLABELS -_DROPPEDLABELS.fields_by_name["label"].message_type = _DROPPEDLABELS_LABELENTRY -DESCRIPTOR.message_types_by_name["DroppedLabels"] = _DROPPEDLABELS -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -DroppedLabels = _reflection.GeneratedProtocolMessageType( - "DroppedLabels", - (_message.Message,), - dict( - LabelEntry=_reflection.GeneratedProtocolMessageType( - "LabelEntry", - (_message.Message,), - dict( - DESCRIPTOR=_DROPPEDLABELS_LABELENTRY, - __module__="google.cloud.monitoring_v3.proto.dropped_labels_pb2" - # @@protoc_insertion_point(class_scope:google.monitoring.v3.DroppedLabels.LabelEntry) - ), - ), - DESCRIPTOR=_DROPPEDLABELS, - __module__="google.cloud.monitoring_v3.proto.dropped_labels_pb2", - __doc__="""A set of (label, value) pairs which were dropped during - aggregation, attached to google.api.Distribution.Exemplars in - google.api.Distribution values during aggregation. - - These values are used in combination with the label values that remain - on the aggregated Distribution timeseries to construct the full label - set for the exemplar values. The resulting full label set may be used to - identify the specific task/job/instance (for example) which may be - contributing to a long-tail, while allowing the storage savings of only - storing aggregated distribution values for a large group. - - Note that there are no guarantees on ordering of the labels from - exemplar-to-exemplar and from distribution-to-distribution in the same - stream, and there may be duplicates. It is up to clients to resolve any - ambiguities. - - - Attributes: - label: - Map from label to its value, for all labels dropped in any - aggregation. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.DroppedLabels) - ), -) -_sym_db.RegisterMessage(DroppedLabels) -_sym_db.RegisterMessage(DroppedLabels.LabelEntry) - - -DESCRIPTOR._options = None -_DROPPEDLABELS_LABELENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/dropped_labels_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/dropped_labels_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/dropped_labels_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-import grpc diff --git a/monitoring/google/cloud/monitoring_v3/proto/group.proto b/monitoring/google/cloud/monitoring_v3/proto/group.proto deleted file mode 100644 index 716b270e1160..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/group.proto +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.monitoring.v3; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "GroupProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// The description of a dynamic collection of monitored resources. Each group -// has a filter that is matched against monitored resources and their associated -// metadata. If a group's filter matches an available monitored resource, then -// that resource is a member of that group. Groups can contain any number of -// monitored resources, and each monitored resource can be a member of any -// number of groups. -// -// Groups can be nested in parent-child hierarchies. The `parentName` field -// identifies an optional parent for each group. If a group has a parent, then -// the only monitored resources available to be matched by the group's filter -// are the resources contained in the parent group. In other words, a group -// contains the monitored resources that match its filter and the filters of all -// the group's ancestors. A group without a parent can contain any monitored -// resource. -// -// For example, consider an infrastructure running a set of instances with two -// user-defined tags: `"environment"` and `"role"`. A parent group has a filter, -// `environment="production"`. A child of that parent group has a filter, -// `role="transcoder"`. The parent group contains all instances in the -// production environment, regardless of their roles. The child group contains -// instances that have the transcoder role *and* are in the production -// environment. -// -// The monitored resources contained in a group can change at any moment, -// depending on what resources exist and what filters are associated with the -// group and its ancestors. -message Group { - // Output only. The name of this group. The format is - // `"projects/{project_id_or_number}/groups/{group_id}"`. - // When creating a group, this field is ignored and a new name is created - // consisting of the project specified in the call to `CreateGroup` - // and a unique `{group_id}` that is generated automatically. - string name = 1; - - // A user-assigned name for this group, used only for display purposes. - string display_name = 2; - - // The name of the group's parent, if it has one. - // The format is `"projects/{project_id_or_number}/groups/{group_id}"`. - // For groups with no parent, `parentName` is the empty string, `""`. 
- string parent_name = 3; - - // The filter used to determine which monitored resources belong to this - // group. - string filter = 5; - - // If true, the members of this group are considered to be a cluster. - // The system can perform additional analysis on groups that are clusters. - bool is_cluster = 6; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/group_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/group_pb2.py deleted file mode 100644 index fc1e159fea4b..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/group_pb2.py +++ /dev/null @@ -1,209 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring_v3/proto/group.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/group.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\nGroupProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n,google/cloud/monitoring_v3/proto/group.proto\x12\x14google.monitoring.v3"d\n\x05Group\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0bparent_name\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12\x12\n\nis_cluster\x18\x06 \x01(\x08\x42\xa2\x01\n\x18\x63om.google.monitoring.v3B\nGroupProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), -) - - -_GROUP = _descriptor.Descriptor( - name="Group", - full_name="google.monitoring.v3.Group", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.Group.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.monitoring.v3.Group.display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="parent_name", - full_name="google.monitoring.v3.Group.parent_name", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.monitoring.v3.Group.filter", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="is_cluster", - full_name="google.monitoring.v3.Group.is_cluster", - index=4, - number=6, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=70, - serialized_end=170, -) - -DESCRIPTOR.message_types_by_name["Group"] = _GROUP -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Group = _reflection.GeneratedProtocolMessageType( - "Group", - (_message.Message,), - dict( - DESCRIPTOR=_GROUP, - __module__="google.cloud.monitoring_v3.proto.group_pb2", - __doc__="""The description of a dynamic collection of monitored - resources. Each group has a filter that is matched against monitored - resources and their associated metadata. If a group's filter matches an - available monitored resource, then that resource is a member of that - group. Groups can contain any number of monitored resources, and each - monitored resource can be a member of any number of groups. - - Groups can be nested in parent-child hierarchies. The ``parentName`` - field identifies an optional parent for each group. If a group has a - parent, then the only monitored resources available to be matched by the - group's filter are the resources contained in the parent group. In other - words, a group contains the monitored resources that match its filter - and the filters of all the group's ancestors. A group without a parent - can contain any monitored resource. - - For example, consider an infrastructure running a set of instances with - two user-defined tags: ``"environment"`` and ``"role"``. A parent group - has a filter, ``environment="production"``. A child of that parent group - has a filter, ``role="transcoder"``. The parent group contains all - instances in the production environment, regardless of their roles. The - child group contains instances that have the transcoder role *and* are - in the production environment. - - The monitored resources contained in a group can change at any moment, - depending on what resources exist and what filters are associated with - the group and its ancestors. - - - Attributes: - name: - Output only. The name of this group. The format is - ``"projects/{project_id_or_number}/groups/{group_id}"``. When - creating a group, this field is ignored and a new name is - created consisting of the project specified in the call to - ``CreateGroup`` and a unique ``{group_id}`` that is generated - automatically. - display_name: - A user-assigned name for this group, used only for display - purposes. - parent_name: - The name of the group's parent, if it has one. The format is - ``"projects/{project_id_or_number}/groups/{group_id}"``. For - groups with no parent, ``parentName`` is the empty string, - ``""``. - filter: - The filter used to determine which monitored resources belong - to this group. - is_cluster: - If true, the members of this group are considered to be a - cluster. The system can perform additional analysis on groups - that are clusters. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.Group) - ), -) -_sym_db.RegisterMessage(Group) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/group_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/group_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/group_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/monitoring/google/cloud/monitoring_v3/proto/group_service.proto b/monitoring/google/cloud/monitoring_v3/proto/group_service.proto deleted file mode 100644 index bf91a916018d..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/group_service.proto +++ /dev/null @@ -1,234 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.monitoring.v3; - -import "google/api/annotations.proto"; -import "google/api/monitored_resource.proto"; -import "google/monitoring/v3/common.proto"; -import "google/monitoring/v3/group.proto"; -import "google/protobuf/empty.proto"; -import "google/api/client.proto"; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "GroupServiceProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// The Group API lets you inspect and manage your -// [groups](#google.monitoring.v3.Group). -// -// A group is a named filter that is used to identify -// a collection of monitored resources. Groups are typically used to -// mirror the physical and/or logical topology of the environment. -// Because group membership is computed dynamically, monitored -// resources that are started in the future are automatically placed -// in matching groups. By using a group to name monitored resources in, -// for example, an alert policy, the target of that alert policy is -// updated automatically as monitored resources are added and removed -// from the infrastructure. -service GroupService { - option (google.api.default_host) = "monitoring.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/monitoring," - "https://www.googleapis.com/auth/monitoring.read"; - - // Lists the existing groups. - rpc ListGroups(ListGroupsRequest) returns (ListGroupsResponse) { - option (google.api.http) = { - get: "/v3/{name=projects/*}/groups" - }; - } - - // Gets a single group. - rpc GetGroup(GetGroupRequest) returns (Group) { - option (google.api.http) = { - get: "/v3/{name=projects/*/groups/*}" - }; - } - - // Creates a new group. 
- rpc CreateGroup(CreateGroupRequest) returns (Group) { - option (google.api.http) = { - post: "/v3/{name=projects/*}/groups" - body: "group" - }; - } - - // Updates an existing group. - // You can change any group attributes except `name`. - rpc UpdateGroup(UpdateGroupRequest) returns (Group) { - option (google.api.http) = { - put: "/v3/{group.name=projects/*/groups/*}" - body: "group" - }; - } - - // Deletes an existing group. - rpc DeleteGroup(DeleteGroupRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v3/{name=projects/*/groups/*}" - }; - } - - // Lists the monitored resources that are members of a group. - rpc ListGroupMembers(ListGroupMembersRequest) returns (ListGroupMembersResponse) { - option (google.api.http) = { - get: "/v3/{name=projects/*/groups/*}/members" - }; - } -} - -// The `ListGroup` request. -message ListGroupsRequest { - // The project whose groups are to be listed. The format is - // `"projects/{project_id_or_number}"`. - string name = 7; - - // An optional filter consisting of a single group name. The filters limit - // the groups returned based on their parent-child relationship with the - // specified group. If no filter is specified, all groups are returned. - oneof filter { - // A group name: `"projects/{project_id_or_number}/groups/{group_id}"`. - // Returns groups whose `parentName` field contains the group - // name. If no groups have this parent, the results are empty. - string children_of_group = 2; - - // A group name: `"projects/{project_id_or_number}/groups/{group_id}"`. - // Returns groups that are ancestors of the specified group. - // The groups are returned in order, starting with the immediate parent and - // ending with the most distant ancestor. If the specified group has no - // immediate parent, the results are empty. - string ancestors_of_group = 3; - - // A group name: `"projects/{project_id_or_number}/groups/{group_id}"`. - // Returns the descendants of the specified group. This is a superset of - // the results returned by the `childrenOfGroup` filter, and includes - // children-of-children, and so forth. - string descendants_of_group = 4; - } - - // A positive number that is the maximum number of results to return. - int32 page_size = 5; - - // If this field is not empty then it must contain the `nextPageToken` value - // returned by a previous call to this method. Using this field causes the - // method to return additional results from the previous method call. - string page_token = 6; -} - -// The `ListGroups` response. -message ListGroupsResponse { - // The groups that match the specified filters. - repeated Group group = 1; - - // If there are more results than have been returned, then this field is set - // to a non-empty value. To see the additional results, - // use that value as `pageToken` in the next call to this method. - string next_page_token = 2; -} - -// The `GetGroup` request. -message GetGroupRequest { - // The group to retrieve. The format is - // `"projects/{project_id_or_number}/groups/{group_id}"`. - string name = 3; -} - -// The `CreateGroup` request. -message CreateGroupRequest { - // The project in which to create the group. The format is - // `"projects/{project_id_or_number}"`. - string name = 4; - - // A group definition. It is an error to define the `name` field because - // the system assigns the name. - Group group = 2; - - // If true, validate this request but do not create the group. - bool validate_only = 3; -} - -// The `UpdateGroup` request. 
-message UpdateGroupRequest { - // The new definition of the group. All fields of the existing group, - // excepting `name`, are replaced with the corresponding fields of this group. - Group group = 2; - - // If true, validate this request but do not update the existing group. - bool validate_only = 3; -} - -// The `DeleteGroup` request. The default behavior is to be able to delete a -// single group without any descendants. -message DeleteGroupRequest { - // The group to delete. The format is - // `"projects/{project_id_or_number}/groups/{group_id}"`. - string name = 3; - - // If this field is true, then the request means to delete a group with all - // its descendants. Otherwise, the request means to delete a group only when - // it has no descendants. The default value is false. - bool recursive = 4; -} - -// The `ListGroupMembers` request. -message ListGroupMembersRequest { - // The group whose members are listed. The format is - // `"projects/{project_id_or_number}/groups/{group_id}"`. - string name = 7; - - // A positive number that is the maximum number of results to return. - int32 page_size = 3; - - // If this field is not empty then it must contain the `nextPageToken` value - // returned by a previous call to this method. Using this field causes the - // method to return additional results from the previous method call. - string page_token = 4; - - // An optional [list filter](/monitoring/api/learn_more#filtering) describing - // the members to be returned. The filter may reference the type, labels, and - // metadata of monitored resources that comprise the group. - // For example, to return only resources representing Compute Engine VM - // instances, use this filter: - // - // resource.type = "gce_instance" - string filter = 5; - - // An optional time interval for which results should be returned. Only - // members that were part of the group during the specified interval are - // included in the response. If no interval is provided then the group - // membership over the last minute is returned. - TimeInterval interval = 6; -} - -// The `ListGroupMembers` response. -message ListGroupMembersResponse { - // A set of monitored resources in the group. - repeated google.api.MonitoredResource members = 1; - - // If there are more results than have been returned, then this field is - // set to a non-empty value. To see the additional results, use that value as - // `pageToken` in the next call to this method. - string next_page_token = 2; - - // The total number of elements matching this request. - int32 total_size = 3; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/group_service_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/group_service_pb2.py deleted file mode 100644 index c16eeb9ce42e..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/group_service_pb2.py +++ /dev/null @@ -1,1022 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring_v3/proto/group_service.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.cloud.monitoring_v3.proto import ( - common_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2, -) -from google.cloud.monitoring_v3.proto import ( - group_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/group_service.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\021GroupServiceProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n4google/cloud/monitoring_v3/proto/group_service.proto\x12\x14google.monitoring.v3\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a-google/cloud/monitoring_v3/proto/common.proto\x1a,google/cloud/monitoring_v3/proto/group.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x17google/api/client.proto"\xad\x01\n\x11ListGroupsRequest\x12\x0c\n\x04name\x18\x07 \x01(\t\x12\x1b\n\x11\x63hildren_of_group\x18\x02 \x01(\tH\x00\x12\x1c\n\x12\x61ncestors_of_group\x18\x03 \x01(\tH\x00\x12\x1e\n\x14\x64\x65scendants_of_group\x18\x04 \x01(\tH\x00\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\tB\x08\n\x06\x66ilter"Y\n\x12ListGroupsResponse\x12*\n\x05group\x18\x01 \x03(\x0b\x32\x1b.google.monitoring.v3.Group\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x1f\n\x0fGetGroupRequest\x12\x0c\n\x04name\x18\x03 \x01(\t"e\n\x12\x43reateGroupRequest\x12\x0c\n\x04name\x18\x04 \x01(\t\x12*\n\x05group\x18\x02 \x01(\x0b\x32\x1b.google.monitoring.v3.Group\x12\x15\n\rvalidate_only\x18\x03 \x01(\x08"W\n\x12UpdateGroupRequest\x12*\n\x05group\x18\x02 \x01(\x0b\x32\x1b.google.monitoring.v3.Group\x12\x15\n\rvalidate_only\x18\x03 \x01(\x08"5\n\x12\x44\x65leteGroupRequest\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x11\n\trecursive\x18\x04 \x01(\x08"\x94\x01\n\x17ListGroupMembersRequest\x12\x0c\n\x04name\x18\x07 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12\x34\n\x08interval\x18\x06 \x01(\x0b\x32".google.monitoring.v3.TimeInterval"w\n\x18ListGroupMembersResponse\x12.\n\x07members\x18\x01 \x03(\x0b\x32\x1d.google.api.MonitoredResource\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x12\x12\n\ntotal_size\x18\x03 \x01(\x05\x32\xe7\x07\n\x0cGroupService\x12\x85\x01\n\nListGroups\x12\'.google.monitoring.v3.ListGroupsRequest\x1a(.google.monitoring.v3.ListGroupsResponse"$\x82\xd3\xe4\x93\x02\x1e\x12\x1c/v3/{name=projects/*}/groups\x12v\n\x08GetGroup\x12%.google.monitoring.v3.GetGroupRequest\x1a\x1b.google.monitoring.v3.Group"&\x82\xd3\xe4\x93\x02 
\x12\x1e/v3/{name=projects/*/groups/*}\x12\x81\x01\n\x0b\x43reateGroup\x12(.google.monitoring.v3.CreateGroupRequest\x1a\x1b.google.monitoring.v3.Group"+\x82\xd3\xe4\x93\x02%"\x1c/v3/{name=projects/*}/groups:\x05group\x12\x89\x01\n\x0bUpdateGroup\x12(.google.monitoring.v3.UpdateGroupRequest\x1a\x1b.google.monitoring.v3.Group"3\x82\xd3\xe4\x93\x02-\x1a$/v3/{group.name=projects/*/groups/*}:\x05group\x12w\n\x0b\x44\x65leteGroup\x12(.google.monitoring.v3.DeleteGroupRequest\x1a\x16.google.protobuf.Empty"&\x82\xd3\xe4\x93\x02 *\x1e/v3/{name=projects/*/groups/*}\x12\xa1\x01\n\x10ListGroupMembers\x12-.google.monitoring.v3.ListGroupMembersRequest\x1a..google.monitoring.v3.ListGroupMembersResponse".\x82\xd3\xe4\x93\x02(\x12&/v3/{name=projects/*/groups/*}/members\x1a\xa9\x01\xca\x41\x19monitoring.googleapis.com\xd2\x41\x89\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.readB\xa9\x01\n\x18\x63om.google.monitoring.v3B\x11GroupServiceProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_LISTGROUPSREQUEST = _descriptor.Descriptor( - name="ListGroupsRequest", - full_name="google.monitoring.v3.ListGroupsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.ListGroupsRequest.name", - index=0, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="children_of_group", - full_name="google.monitoring.v3.ListGroupsRequest.children_of_group", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="ancestors_of_group", - full_name="google.monitoring.v3.ListGroupsRequest.ancestors_of_group", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="descendants_of_group", - full_name="google.monitoring.v3.ListGroupsRequest.descendants_of_group", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.monitoring.v3.ListGroupsRequest.page_size", - index=4, - 
number=5, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.monitoring.v3.ListGroupsRequest.page_token", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="filter", - full_name="google.monitoring.v3.ListGroupsRequest.filter", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=293, - serialized_end=466, -) - - -_LISTGROUPSRESPONSE = _descriptor.Descriptor( - name="ListGroupsResponse", - full_name="google.monitoring.v3.ListGroupsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="group", - full_name="google.monitoring.v3.ListGroupsResponse.group", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.monitoring.v3.ListGroupsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=468, - serialized_end=557, -) - - -_GETGROUPREQUEST = _descriptor.Descriptor( - name="GetGroupRequest", - full_name="google.monitoring.v3.GetGroupRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.GetGroupRequest.name", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=559, - serialized_end=590, -) - - -_CREATEGROUPREQUEST = _descriptor.Descriptor( - name="CreateGroupRequest", - full_name="google.monitoring.v3.CreateGroupRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.CreateGroupRequest.name", - index=0, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="group", - full_name="google.monitoring.v3.CreateGroupRequest.group", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="validate_only", - full_name="google.monitoring.v3.CreateGroupRequest.validate_only", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=592, - serialized_end=693, -) - - -_UPDATEGROUPREQUEST = _descriptor.Descriptor( - name="UpdateGroupRequest", - full_name="google.monitoring.v3.UpdateGroupRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="group", - full_name="google.monitoring.v3.UpdateGroupRequest.group", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="validate_only", - full_name="google.monitoring.v3.UpdateGroupRequest.validate_only", - index=1, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=695, - serialized_end=782, -) - - -_DELETEGROUPREQUEST = _descriptor.Descriptor( - name="DeleteGroupRequest", - full_name="google.monitoring.v3.DeleteGroupRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.DeleteGroupRequest.name", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="recursive", - full_name="google.monitoring.v3.DeleteGroupRequest.recursive", - index=1, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=784, - serialized_end=837, -) - - -_LISTGROUPMEMBERSREQUEST = _descriptor.Descriptor( - name="ListGroupMembersRequest", - 
full_name="google.monitoring.v3.ListGroupMembersRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.ListGroupMembersRequest.name", - index=0, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.monitoring.v3.ListGroupMembersRequest.page_size", - index=1, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.monitoring.v3.ListGroupMembersRequest.page_token", - index=2, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.monitoring.v3.ListGroupMembersRequest.filter", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="interval", - full_name="google.monitoring.v3.ListGroupMembersRequest.interval", - index=4, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=840, - serialized_end=988, -) - - -_LISTGROUPMEMBERSRESPONSE = _descriptor.Descriptor( - name="ListGroupMembersResponse", - full_name="google.monitoring.v3.ListGroupMembersResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="members", - full_name="google.monitoring.v3.ListGroupMembersResponse.members", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.monitoring.v3.ListGroupMembersResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="total_size", - full_name="google.monitoring.v3.ListGroupMembersResponse.total_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, 
- default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=990, - serialized_end=1109, -) - -_LISTGROUPSREQUEST.oneofs_by_name["filter"].fields.append( - _LISTGROUPSREQUEST.fields_by_name["children_of_group"] -) -_LISTGROUPSREQUEST.fields_by_name[ - "children_of_group" -].containing_oneof = _LISTGROUPSREQUEST.oneofs_by_name["filter"] -_LISTGROUPSREQUEST.oneofs_by_name["filter"].fields.append( - _LISTGROUPSREQUEST.fields_by_name["ancestors_of_group"] -) -_LISTGROUPSREQUEST.fields_by_name[ - "ancestors_of_group" -].containing_oneof = _LISTGROUPSREQUEST.oneofs_by_name["filter"] -_LISTGROUPSREQUEST.oneofs_by_name["filter"].fields.append( - _LISTGROUPSREQUEST.fields_by_name["descendants_of_group"] -) -_LISTGROUPSREQUEST.fields_by_name[ - "descendants_of_group" -].containing_oneof = _LISTGROUPSREQUEST.oneofs_by_name["filter"] -_LISTGROUPSRESPONSE.fields_by_name[ - "group" -].message_type = google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2._GROUP -_CREATEGROUPREQUEST.fields_by_name[ - "group" -].message_type = google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2._GROUP -_UPDATEGROUPREQUEST.fields_by_name[ - "group" -].message_type = google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2._GROUP -_LISTGROUPMEMBERSREQUEST.fields_by_name[ - "interval" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2._TIMEINTERVAL -) -_LISTGROUPMEMBERSRESPONSE.fields_by_name[ - "members" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE -DESCRIPTOR.message_types_by_name["ListGroupsRequest"] = _LISTGROUPSREQUEST -DESCRIPTOR.message_types_by_name["ListGroupsResponse"] = _LISTGROUPSRESPONSE -DESCRIPTOR.message_types_by_name["GetGroupRequest"] = _GETGROUPREQUEST -DESCRIPTOR.message_types_by_name["CreateGroupRequest"] = _CREATEGROUPREQUEST -DESCRIPTOR.message_types_by_name["UpdateGroupRequest"] = _UPDATEGROUPREQUEST -DESCRIPTOR.message_types_by_name["DeleteGroupRequest"] = _DELETEGROUPREQUEST -DESCRIPTOR.message_types_by_name["ListGroupMembersRequest"] = _LISTGROUPMEMBERSREQUEST -DESCRIPTOR.message_types_by_name["ListGroupMembersResponse"] = _LISTGROUPMEMBERSRESPONSE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ListGroupsRequest = _reflection.GeneratedProtocolMessageType( - "ListGroupsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTGROUPSREQUEST, - __module__="google.cloud.monitoring_v3.proto.group_service_pb2", - __doc__="""The ``ListGroup`` request. - - - Attributes: - name: - The project whose groups are to be listed. The format is - ``"projects/{project_id_or_number}"``. - filter: - An optional filter consisting of a single group name. The - filters limit the groups returned based on their parent-child - relationship with the specified group. If no filter is - specified, all groups are returned. - children_of_group: - A group name: - ``"projects/{project_id_or_number}/groups/{group_id}"``. - Returns groups whose ``parentName`` field contains the group - name. If no groups have this parent, the results are empty. - ancestors_of_group: - A group name: - ``"projects/{project_id_or_number}/groups/{group_id}"``. - Returns groups that are ancestors of the specified group. 
The - groups are returned in order, starting with the immediate - parent and ending with the most distant ancestor. If the - specified group has no immediate parent, the results are - empty. - descendants_of_group: - A group name: - ``"projects/{project_id_or_number}/groups/{group_id}"``. - Returns the descendants of the specified group. This is a - superset of the results returned by the ``childrenOfGroup`` - filter, and includes children-of-children, and so forth. - page_size: - A positive number that is the maximum number of results to - return. - page_token: - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return - additional results from the previous method call. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListGroupsRequest) - ), -) -_sym_db.RegisterMessage(ListGroupsRequest) - -ListGroupsResponse = _reflection.GeneratedProtocolMessageType( - "ListGroupsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTGROUPSRESPONSE, - __module__="google.cloud.monitoring_v3.proto.group_service_pb2", - __doc__="""The ``ListGroups`` response. - - - Attributes: - group: - The groups that match the specified filters. - next_page_token: - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``pageToken`` in the next call to - this method. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListGroupsResponse) - ), -) -_sym_db.RegisterMessage(ListGroupsResponse) - -GetGroupRequest = _reflection.GeneratedProtocolMessageType( - "GetGroupRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETGROUPREQUEST, - __module__="google.cloud.monitoring_v3.proto.group_service_pb2", - __doc__="""The ``GetGroup`` request. - - - Attributes: - name: - The group to retrieve. The format is - ``"projects/{project_id_or_number}/groups/{group_id}"``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.GetGroupRequest) - ), -) -_sym_db.RegisterMessage(GetGroupRequest) - -CreateGroupRequest = _reflection.GeneratedProtocolMessageType( - "CreateGroupRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATEGROUPREQUEST, - __module__="google.cloud.monitoring_v3.proto.group_service_pb2", - __doc__="""The ``CreateGroup`` request. - - - Attributes: - name: - The project in which to create the group. The format is - ``"projects/{project_id_or_number}"``. - group: - A group definition. It is an error to define the ``name`` - field because the system assigns the name. - validate_only: - If true, validate this request but do not create the group. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateGroupRequest) - ), -) -_sym_db.RegisterMessage(CreateGroupRequest) - -UpdateGroupRequest = _reflection.GeneratedProtocolMessageType( - "UpdateGroupRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATEGROUPREQUEST, - __module__="google.cloud.monitoring_v3.proto.group_service_pb2", - __doc__="""The ``UpdateGroup`` request. - - - Attributes: - group: - The new definition of the group. All fields of the existing - group, excepting ``name``, are replaced with the corresponding - fields of this group. - validate_only: - If true, validate this request but do not update the existing - group. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.UpdateGroupRequest) - ), -) -_sym_db.RegisterMessage(UpdateGroupRequest) - -DeleteGroupRequest = _reflection.GeneratedProtocolMessageType( - "DeleteGroupRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEGROUPREQUEST, - __module__="google.cloud.monitoring_v3.proto.group_service_pb2", - __doc__="""The ``DeleteGroup`` request. The default behavior is to be - able to delete a single group without any descendants. - - - Attributes: - name: - The group to delete. The format is - ``"projects/{project_id_or_number}/groups/{group_id}"``. - recursive: - If this field is true, then the request means to delete a - group with all its descendants. Otherwise, the request means - to delete a group only when it has no descendants. The default - value is false. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.DeleteGroupRequest) - ), -) -_sym_db.RegisterMessage(DeleteGroupRequest) - -ListGroupMembersRequest = _reflection.GeneratedProtocolMessageType( - "ListGroupMembersRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTGROUPMEMBERSREQUEST, - __module__="google.cloud.monitoring_v3.proto.group_service_pb2", - __doc__="""The ``ListGroupMembers`` request. - - - Attributes: - name: - The group whose members are listed. The format is - ``"projects/{project_id_or_number}/groups/{group_id}"``. - page_size: - A positive number that is the maximum number of results to - return. - page_token: - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return - additional results from the previous method call. - filter: - An optional `list filter - `__ describing the - members to be returned. The filter may reference the type, - labels, and metadata of monitored resources that comprise the - group. For example, to return only resources representing - Compute Engine VM instances, use this filter: :: - resource.type = "gce_instance" - interval: - An optional time interval for which results should be - returned. Only members that were part of the group during the - specified interval are included in the response. If no - interval is provided then the group membership over the last - minute is returned. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListGroupMembersRequest) - ), -) -_sym_db.RegisterMessage(ListGroupMembersRequest) - -ListGroupMembersResponse = _reflection.GeneratedProtocolMessageType( - "ListGroupMembersResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTGROUPMEMBERSRESPONSE, - __module__="google.cloud.monitoring_v3.proto.group_service_pb2", - __doc__="""The ``ListGroupMembers`` response. - - - Attributes: - members: - A set of monitored resources in the group. - next_page_token: - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``pageToken`` in the next call to - this method. - total_size: - The total number of elements matching this request. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListGroupMembersResponse) - ), -) -_sym_db.RegisterMessage(ListGroupMembersResponse) - - -DESCRIPTOR._options = None - -_GROUPSERVICE = _descriptor.ServiceDescriptor( - name="GroupService", - full_name="google.monitoring.v3.GroupService", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\031monitoring.googleapis.com\322A\211\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.read" - ), - serialized_start=1112, - serialized_end=2111, - methods=[ - _descriptor.MethodDescriptor( - name="ListGroups", - full_name="google.monitoring.v3.GroupService.ListGroups", - index=0, - containing_service=None, - input_type=_LISTGROUPSREQUEST, - output_type=_LISTGROUPSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002\036\022\034/v3/{name=projects/*}/groups" - ), - ), - _descriptor.MethodDescriptor( - name="GetGroup", - full_name="google.monitoring.v3.GroupService.GetGroup", - index=1, - containing_service=None, - input_type=_GETGROUPREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2._GROUP, - serialized_options=_b( - "\202\323\344\223\002 \022\036/v3/{name=projects/*/groups/*}" - ), - ), - _descriptor.MethodDescriptor( - name="CreateGroup", - full_name="google.monitoring.v3.GroupService.CreateGroup", - index=2, - containing_service=None, - input_type=_CREATEGROUPREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2._GROUP, - serialized_options=_b( - '\202\323\344\223\002%"\034/v3/{name=projects/*}/groups:\005group' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateGroup", - full_name="google.monitoring.v3.GroupService.UpdateGroup", - index=3, - containing_service=None, - input_type=_UPDATEGROUPREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2._GROUP, - serialized_options=_b( - "\202\323\344\223\002-\032$/v3/{group.name=projects/*/groups/*}:\005group" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteGroup", - full_name="google.monitoring.v3.GroupService.DeleteGroup", - index=4, - containing_service=None, - input_type=_DELETEGROUPREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002 *\036/v3/{name=projects/*/groups/*}" - ), - ), - _descriptor.MethodDescriptor( - name="ListGroupMembers", - full_name="google.monitoring.v3.GroupService.ListGroupMembers", - index=5, - containing_service=None, - input_type=_LISTGROUPMEMBERSREQUEST, - output_type=_LISTGROUPMEMBERSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002(\022&/v3/{name=projects/*/groups/*}/members" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_GROUPSERVICE) - -DESCRIPTOR.services_by_name["GroupService"] = _GROUPSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/group_service_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/group_service_pb2_grpc.py deleted file mode 100644 index d681b6c18642..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/group_service_pb2_grpc.py +++ /dev/null @@ -1,161 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-import grpc - -from google.cloud.monitoring_v3.proto import ( - group_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2, -) -from google.cloud.monitoring_v3.proto import ( - group_service_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class GroupServiceStub(object): - """The Group API lets you inspect and manage your - [groups](#google.monitoring.v3.Group). - - A group is a named filter that is used to identify - a collection of monitored resources. Groups are typically used to - mirror the physical and/or logical topology of the environment. - Because group membership is computed dynamically, monitored - resources that are started in the future are automatically placed - in matching groups. By using a group to name monitored resources in, - for example, an alert policy, the target of that alert policy is - updated automatically as monitored resources are added and removed - from the infrastructure. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.ListGroups = channel.unary_unary( - "/google.monitoring.v3.GroupService/ListGroups", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.ListGroupsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.ListGroupsResponse.FromString, - ) - self.GetGroup = channel.unary_unary( - "/google.monitoring.v3.GroupService/GetGroup", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.GetGroupRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2.Group.FromString, - ) - self.CreateGroup = channel.unary_unary( - "/google.monitoring.v3.GroupService/CreateGroup", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.CreateGroupRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2.Group.FromString, - ) - self.UpdateGroup = channel.unary_unary( - "/google.monitoring.v3.GroupService/UpdateGroup", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.UpdateGroupRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2.Group.FromString, - ) - self.DeleteGroup = channel.unary_unary( - "/google.monitoring.v3.GroupService/DeleteGroup", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.DeleteGroupRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ListGroupMembers = channel.unary_unary( - "/google.monitoring.v3.GroupService/ListGroupMembers", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.ListGroupMembersRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.ListGroupMembersResponse.FromString, - ) - - -class GroupServiceServicer(object): - """The Group API lets you inspect and manage your - [groups](#google.monitoring.v3.Group). - - A group is a named filter that is used to identify - a collection of monitored resources. Groups are typically used to - mirror the physical and/or logical topology of the environment. 
- Because group membership is computed dynamically, monitored - resources that are started in the future are automatically placed - in matching groups. By using a group to name monitored resources in, - for example, an alert policy, the target of that alert policy is - updated automatically as monitored resources are added and removed - from the infrastructure. - """ - - def ListGroups(self, request, context): - """Lists the existing groups. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetGroup(self, request, context): - """Gets a single group. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateGroup(self, request, context): - """Creates a new group. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateGroup(self, request, context): - """Updates an existing group. - You can change any group attributes except `name`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteGroup(self, request, context): - """Deletes an existing group. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListGroupMembers(self, request, context): - """Lists the monitored resources that are members of a group. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_GroupServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "ListGroups": grpc.unary_unary_rpc_method_handler( - servicer.ListGroups, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.ListGroupsRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.ListGroupsResponse.SerializeToString, - ), - "GetGroup": grpc.unary_unary_rpc_method_handler( - servicer.GetGroup, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.GetGroupRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2.Group.SerializeToString, - ), - "CreateGroup": grpc.unary_unary_rpc_method_handler( - servicer.CreateGroup, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.CreateGroupRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2.Group.SerializeToString, - ), - "UpdateGroup": grpc.unary_unary_rpc_method_handler( - servicer.UpdateGroup, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.UpdateGroupRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__pb2.Group.SerializeToString, - ), - "DeleteGroup": grpc.unary_unary_rpc_method_handler( - servicer.DeleteGroup, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.DeleteGroupRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "ListGroupMembers": 
grpc.unary_unary_rpc_method_handler( - servicer.ListGroupMembers, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.ListGroupMembersRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_group__service__pb2.ListGroupMembersResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.monitoring.v3.GroupService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/monitoring/google/cloud/monitoring_v3/proto/metric.proto b/monitoring/google/cloud/monitoring_v3/proto/metric.proto deleted file mode 100644 index 3c202ed412c9..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/metric.proto +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.monitoring.v3; - -import "google/api/distribution.proto"; -import "google/api/label.proto"; -import "google/api/metric.proto"; -import "google/api/monitored_resource.proto"; -import "google/monitoring/v3/common.proto"; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "MetricProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// A single data point in a time series. -message Point { - // The time interval to which the data point applies. For `GAUGE` metrics, - // the start time is optional, but if it is supplied, it must equal the - // end time. For `DELTA` metrics, the start - // and end time should specify a non-zero interval, with subsequent points - // specifying contiguous and non-overlapping intervals. For `CUMULATIVE` - // metrics, the start and end time should specify a non-zero interval, with - // subsequent points specifying the same start time and increasing end times, - // until an event resets the cumulative value to zero and sets a new start - // time for the following points. - TimeInterval interval = 1; - - // The value of the data point. - TypedValue value = 2; -} - -// A collection of data points that describes the time-varying values -// of a metric. A time series is identified by a combination of a -// fully-specified monitored resource and a fully-specified metric. -// This type is used for both listing and creating time series. -message TimeSeries { - // The associated metric. A fully-specified metric used to identify the time - // series. - google.api.Metric metric = 1; - - // The associated monitored resource. Custom metrics can use only certain - // monitored resource types in their time series data. - google.api.MonitoredResource resource = 2; - - // Output only. The associated monitored resource metadata. When reading a - // a timeseries, this field will include metadata labels that are explicitly - // named in the reduction. 
When creating a timeseries, this field is ignored. - google.api.MonitoredResourceMetadata metadata = 7; - - // The metric kind of the time series. When listing time series, this metric - // kind might be different from the metric kind of the associated metric if - // this time series is an alignment or reduction of other time series. - // - // When creating a time series, this field is optional. If present, it must be - // the same as the metric kind of the associated metric. If the associated - // metric's descriptor must be auto-created, then this field specifies the - // metric kind of the new descriptor and must be either `GAUGE` (the default) - // or `CUMULATIVE`. - google.api.MetricDescriptor.MetricKind metric_kind = 3; - - // The value type of the time series. When listing time series, this value - // type might be different from the value type of the associated metric if - // this time series is an alignment or reduction of other time series. - // - // When creating a time series, this field is optional. If present, it must be - // the same as the type of the data in the `points` field. - google.api.MetricDescriptor.ValueType value_type = 4; - - // The data points of this time series. When listing time series, points are - // returned in reverse time order. - // - // When creating a time series, this field must contain exactly one point and - // the point's type must be the same as the value type of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // the value type of the descriptor is determined by the point's type, which - // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. - repeated Point points = 5; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/metric_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/metric_pb2.py deleted file mode 100644 index 72e2e92c51af..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/metric_pb2.py +++ /dev/null @@ -1,353 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring_v3/proto/metric.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import distribution_pb2 as google_dot_api_dot_distribution__pb2 -from google.api import label_pb2 as google_dot_api_dot_label__pb2 -from google.api import metric_pb2 as google_dot_api_dot_metric__pb2 -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.cloud.monitoring_v3.proto import ( - common_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/metric.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\013MetricProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n-google/cloud/monitoring_v3/proto/metric.proto\x12\x14google.monitoring.v3\x1a\x1dgoogle/api/distribution.proto\x1a\x16google/api/label.proto\x1a\x17google/api/metric.proto\x1a#google/api/monitored_resource.proto\x1a-google/cloud/monitoring_v3/proto/common.proto"n\n\x05Point\x12\x34\n\x08interval\x18\x01 \x01(\x0b\x32".google.monitoring.v3.TimeInterval\x12/\n\x05value\x18\x02 \x01(\x0b\x32 .google.monitoring.v3.TypedValue"\xc1\x02\n\nTimeSeries\x12"\n\x06metric\x18\x01 \x01(\x0b\x32\x12.google.api.Metric\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x37\n\x08metadata\x18\x07 \x01(\x0b\x32%.google.api.MonitoredResourceMetadata\x12<\n\x0bmetric_kind\x18\x03 \x01(\x0e\x32\'.google.api.MetricDescriptor.MetricKind\x12:\n\nvalue_type\x18\x04 \x01(\x0e\x32&.google.api.MetricDescriptor.ValueType\x12+\n\x06points\x18\x05 \x03(\x0b\x32\x1b.google.monitoring.v3.PointB\xa3\x01\n\x18\x63om.google.monitoring.v3B\x0bMetricProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_distribution__pb2.DESCRIPTOR, - google_dot_api_dot_label__pb2.DESCRIPTOR, - google_dot_api_dot_metric__pb2.DESCRIPTOR, - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2.DESCRIPTOR, - ], -) - - -_POINT = _descriptor.Descriptor( - name="Point", - full_name="google.monitoring.v3.Point", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="interval", - full_name="google.monitoring.v3.Point.interval", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.monitoring.v3.Point.value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=235, - serialized_end=345, -) - - -_TIMESERIES = _descriptor.Descriptor( - name="TimeSeries", - full_name="google.monitoring.v3.TimeSeries", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metric", - full_name="google.monitoring.v3.TimeSeries.metric", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource", - full_name="google.monitoring.v3.TimeSeries.resource", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metadata", - full_name="google.monitoring.v3.TimeSeries.metadata", - index=2, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric_kind", - full_name="google.monitoring.v3.TimeSeries.metric_kind", - index=3, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value_type", - full_name="google.monitoring.v3.TimeSeries.value_type", - index=4, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="points", - full_name="google.monitoring.v3.TimeSeries.points", - index=5, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=348, - serialized_end=669, -) - -_POINT.fields_by_name[ - "interval" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2._TIMEINTERVAL -) -_POINT.fields_by_name[ - "value" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2._TYPEDVALUE -) -_TIMESERIES.fields_by_name[ - "metric" -].message_type = google_dot_api_dot_metric__pb2._METRIC -_TIMESERIES.fields_by_name[ - "resource" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE -_TIMESERIES.fields_by_name[ - "metadata" -].message_type = 
google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEMETADATA -_TIMESERIES.fields_by_name[ - "metric_kind" -].enum_type = google_dot_api_dot_metric__pb2._METRICDESCRIPTOR_METRICKIND -_TIMESERIES.fields_by_name[ - "value_type" -].enum_type = google_dot_api_dot_metric__pb2._METRICDESCRIPTOR_VALUETYPE -_TIMESERIES.fields_by_name["points"].message_type = _POINT -DESCRIPTOR.message_types_by_name["Point"] = _POINT -DESCRIPTOR.message_types_by_name["TimeSeries"] = _TIMESERIES -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Point = _reflection.GeneratedProtocolMessageType( - "Point", - (_message.Message,), - dict( - DESCRIPTOR=_POINT, - __module__="google.cloud.monitoring_v3.proto.metric_pb2", - __doc__="""A single data point in a time series. - - - Attributes: - interval: - The time interval to which the data point applies. For - ``GAUGE`` metrics, the start time is optional, but if it is - supplied, it must equal the end time. For ``DELTA`` metrics, - the start and end time should specify a non-zero interval, - with subsequent points specifying contiguous and non- - overlapping intervals. For ``CUMULATIVE`` metrics, the start - and end time should specify a non-zero interval, with - subsequent points specifying the same start time and - increasing end times, until an event resets the cumulative - value to zero and sets a new start time for the following - points. - value: - The value of the data point. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.Point) - ), -) -_sym_db.RegisterMessage(Point) - -TimeSeries = _reflection.GeneratedProtocolMessageType( - "TimeSeries", - (_message.Message,), - dict( - DESCRIPTOR=_TIMESERIES, - __module__="google.cloud.monitoring_v3.proto.metric_pb2", - __doc__="""A collection of data points that describes the - time-varying values of a metric. A time series is identified by a - combination of a fully-specified monitored resource and a - fully-specified metric. This type is used for both listing and creating - time series. - - - Attributes: - metric: - The associated metric. A fully-specified metric used to - identify the time series. - resource: - The associated monitored resource. Custom metrics can use only - certain monitored resource types in their time series data. - metadata: - Output only. The associated monitored resource metadata. When - reading a a timeseries, this field will include metadata - labels that are explicitly named in the reduction. When - creating a timeseries, this field is ignored. - metric_kind: - The metric kind of the time series. When listing time series, - this metric kind might be different from the metric kind of - the associated metric if this time series is an alignment or - reduction of other time series. When creating a time series, - this field is optional. If present, it must be the same as the - metric kind of the associated metric. If the associated - metric's descriptor must be auto-created, then this field - specifies the metric kind of the new descriptor and must be - either ``GAUGE`` (the default) or ``CUMULATIVE``. - value_type: - The value type of the time series. When listing time series, - this value type might be different from the value type of the - associated metric if this time series is an alignment or - reduction of other time series. When creating a time series, - this field is optional. If present, it must be the same as the - type of the data in the ``points`` field. - points: - The data points of this time series. 
When listing time series, - points are returned in reverse time order. When creating a - time series, this field must contain exactly one point and the - point's type must be the same as the value type of the - associated metric. If the associated metric's descriptor must - be auto-created, then the value type of the descriptor is - determined by the point's type, which must be ``BOOL``, - ``INT64``, ``DOUBLE``, or ``DISTRIBUTION``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.TimeSeries) - ), -) -_sym_db.RegisterMessage(TimeSeries) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/metric_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/metric_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/metric_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/monitoring/google/cloud/monitoring_v3/proto/metric_service.proto b/monitoring/google/cloud/monitoring_v3/proto/metric_service.proto deleted file mode 100644 index 101dee40e973..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/metric_service.proto +++ /dev/null @@ -1,341 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.monitoring.v3; - -import "google/api/annotations.proto"; -import "google/api/metric.proto"; -import "google/api/monitored_resource.proto"; -import "google/monitoring/v3/alert.proto"; -import "google/monitoring/v3/common.proto"; -import "google/monitoring/v3/metric.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; -import "google/rpc/status.proto"; -import "google/api/client.proto"; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "MetricServiceProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// Manages metric descriptors, monitored resource descriptors, and -// time series data. -service MetricService { - option (google.api.default_host) = "monitoring.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/monitoring," - "https://www.googleapis.com/auth/monitoring.read," - "https://www.googleapis.com/auth/monitoring.write"; - - // Lists monitored resource descriptors that match a filter. This method does not require a Stackdriver account. 
- rpc ListMonitoredResourceDescriptors(ListMonitoredResourceDescriptorsRequest) returns (ListMonitoredResourceDescriptorsResponse) { - option (google.api.http) = { - get: "/v3/{name=projects/*}/monitoredResourceDescriptors" - }; - } - - // Gets a single monitored resource descriptor. This method does not require a Stackdriver account. - rpc GetMonitoredResourceDescriptor(GetMonitoredResourceDescriptorRequest) returns (google.api.MonitoredResourceDescriptor) { - option (google.api.http) = { - get: "/v3/{name=projects/*/monitoredResourceDescriptors/*}" - }; - } - - // Lists metric descriptors that match a filter. This method does not require a Stackdriver account. - rpc ListMetricDescriptors(ListMetricDescriptorsRequest) returns (ListMetricDescriptorsResponse) { - option (google.api.http) = { - get: "/v3/{name=projects/*}/metricDescriptors" - }; - } - - // Gets a single metric descriptor. This method does not require a Stackdriver account. - rpc GetMetricDescriptor(GetMetricDescriptorRequest) returns (google.api.MetricDescriptor) { - option (google.api.http) = { - get: "/v3/{name=projects/*/metricDescriptors/**}" - }; - } - - // Creates a new metric descriptor. - // User-created metric descriptors define - // [custom metrics](/monitoring/custom-metrics). - rpc CreateMetricDescriptor(CreateMetricDescriptorRequest) returns (google.api.MetricDescriptor) { - option (google.api.http) = { - post: "/v3/{name=projects/*}/metricDescriptors" - body: "metric_descriptor" - }; - } - - // Deletes a metric descriptor. Only user-created - // [custom metrics](/monitoring/custom-metrics) can be deleted. - rpc DeleteMetricDescriptor(DeleteMetricDescriptorRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v3/{name=projects/*/metricDescriptors/**}" - }; - } - - // Lists time series that match a filter. This method does not require a Stackdriver account. - rpc ListTimeSeries(ListTimeSeriesRequest) returns (ListTimeSeriesResponse) { - option (google.api.http) = { - get: "/v3/{name=projects/*}/timeSeries" - }; - } - - // Creates or adds data to one or more time series. - // The response is empty if all time series in the request were written. - // If any time series could not be written, a corresponding failure message is - // included in the error response. - rpc CreateTimeSeries(CreateTimeSeriesRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v3/{name=projects/*}/timeSeries" - body: "*" - }; - } -} - -// The `ListMonitoredResourceDescriptors` request. -message ListMonitoredResourceDescriptorsRequest { - // The project on which to execute the request. The format is - // `"projects/{project_id_or_number}"`. - string name = 5; - - // An optional [filter](/monitoring/api/v3/filters) describing - // the descriptors to be returned. The filter can reference - // the descriptor's type and labels. For example, the - // following filter returns only Google Compute Engine descriptors - // that have an `id` label: - // - // resource.type = starts_with("gce_") AND resource.label:id - string filter = 2; - - // A positive number that is the maximum number of results to return. - int32 page_size = 3; - - // If this field is not empty then it must contain the `nextPageToken` value - // returned by a previous call to this method. Using this field causes the - // method to return additional results from the previous method call. - string page_token = 4; -} - -// The `ListMonitoredResourceDescriptors` response. 
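The MetricService definition above covers both the descriptor surface and the time-series read/write surface. For the read side, a hedged sketch of a ListTimeSeries call with the same pre-split monitoring_v3 client, assuming a placeholder project and a one-hour query window; the filter string follows the monitoring filter syntax described in the request messages that follow:

    import time

    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()
    project_name = client.project_path("my-project-id")  # placeholder project

    # Only series with points inside this interval are returned.
    interval = monitoring_v3.types.TimeInterval()
    now = time.time()
    interval.end_time.seconds = int(now)
    interval.start_time.seconds = int(now - 3600)

    results = client.list_time_series(
        project_name,
        'metric.type = "compute.googleapis.com/instance/cpu/utilization"',
        interval,
        monitoring_v3.enums.ListTimeSeriesRequest.TimeSeriesView.FULL,
    )
    for series in results:
        print(series.metric.type, len(series.points))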
-message ListMonitoredResourceDescriptorsResponse { - // The monitored resource descriptors that are available to this project - // and that match `filter`, if present. - repeated google.api.MonitoredResourceDescriptor resource_descriptors = 1; - - // If there are more results than have been returned, then this field is set - // to a non-empty value. To see the additional results, - // use that value as `pageToken` in the next call to this method. - string next_page_token = 2; -} - -// The `GetMonitoredResourceDescriptor` request. -message GetMonitoredResourceDescriptorRequest { - // The monitored resource descriptor to get. The format is - // `"projects/{project_id_or_number}/monitoredResourceDescriptors/{resource_type}"`. - // The `{resource_type}` is a predefined type, such as - // `cloudsql_database`. - string name = 3; -} - -// The `ListMetricDescriptors` request. -message ListMetricDescriptorsRequest { - // The project on which to execute the request. The format is - // `"projects/{project_id_or_number}"`. - string name = 5; - - // If this field is empty, all custom and - // system-defined metric descriptors are returned. - // Otherwise, the [filter](/monitoring/api/v3/filters) - // specifies which metric descriptors are to be - // returned. For example, the following filter matches all - // [custom metrics](/monitoring/custom-metrics): - // - // metric.type = starts_with("custom.googleapis.com/") - string filter = 2; - - // A positive number that is the maximum number of results to return. - int32 page_size = 3; - - // If this field is not empty then it must contain the `nextPageToken` value - // returned by a previous call to this method. Using this field causes the - // method to return additional results from the previous method call. - string page_token = 4; -} - -// The `ListMetricDescriptors` response. -message ListMetricDescriptorsResponse { - // The metric descriptors that are available to the project - // and that match the value of `filter`, if present. - repeated google.api.MetricDescriptor metric_descriptors = 1; - - // If there are more results than have been returned, then this field is set - // to a non-empty value. To see the additional results, - // use that value as `pageToken` in the next call to this method. - string next_page_token = 2; -} - -// The `GetMetricDescriptor` request. -message GetMetricDescriptorRequest { - // The metric descriptor on which to execute the request. The format is - // `"projects/{project_id_or_number}/metricDescriptors/{metric_id}"`. - // An example value of `{metric_id}` is - // `"compute.googleapis.com/instance/disk/read_bytes_count"`. - string name = 3; -} - -// The `CreateMetricDescriptor` request. -message CreateMetricDescriptorRequest { - // The project on which to execute the request. The format is - // `"projects/{project_id_or_number}"`. - string name = 3; - - // The new [custom metric](/monitoring/custom-metrics) - // descriptor. - google.api.MetricDescriptor metric_descriptor = 2; -} - -// The `DeleteMetricDescriptor` request. -message DeleteMetricDescriptorRequest { - // The metric descriptor on which to execute the request. The format is - // `"projects/{project_id_or_number}/metricDescriptors/{metric_id}"`. - // An example of `{metric_id}` is: - // `"custom.googleapis.com/my_test_metric"`. - string name = 3; -} - -// The `ListTimeSeries` request. -message ListTimeSeriesRequest { - // Controls which fields are returned by `ListTimeSeries`. 
- enum TimeSeriesView { - // Returns the identity of the metric(s), the time series, - // and the time series data. - FULL = 0; - - // Returns the identity of the metric and the time series resource, - // but not the time series data. - HEADERS = 1; - } - - // The project on which to execute the request. The format is - // "projects/{project_id_or_number}". - string name = 10; - - // A [monitoring filter](/monitoring/api/v3/filters) that specifies which time - // series should be returned. The filter must specify a single metric type, - // and can additionally specify metric labels and other information. For - // example: - // - // metric.type = "compute.googleapis.com/instance/cpu/usage_time" AND - // metric.labels.instance_name = "my-instance-name" - string filter = 2; - - // The time interval for which results should be returned. Only time series - // that contain data points in the specified interval are included - // in the response. - TimeInterval interval = 4; - - // Specifies the alignment of data points in individual time series as - // well as how to combine the retrieved time series across specified labels. - // - // By default (if no `aggregation` is explicitly specified), the raw time - // series data is returned. - Aggregation aggregation = 5; - - // Unsupported: must be left blank. The points in each time series are - // currently returned in reverse time order (most recent to oldest). - string order_by = 6; - - // Specifies which information is returned about the time series. - TimeSeriesView view = 7; - - // A positive number that is the maximum number of results to return. If - // `page_size` is empty or more than 100,000 results, the effective - // `page_size` is 100,000 results. If `view` is set to `FULL`, this is the - // maximum number of `Points` returned. If `view` is set to `HEADERS`, this is - // the maximum number of `TimeSeries` returned. - int32 page_size = 8; - - // If this field is not empty then it must contain the `nextPageToken` value - // returned by a previous call to this method. Using this field causes the - // method to return additional results from the previous method call. - string page_token = 9; -} - -// The `ListTimeSeries` response. -message ListTimeSeriesResponse { - // One or more time series that match the filter included in the request. - repeated TimeSeries time_series = 1; - - // If there are more results than have been returned, then this field is set - // to a non-empty value. To see the additional results, - // use that value as `pageToken` in the next call to this method. - string next_page_token = 2; - - // Query execution errors that may have caused the time series data returned - // to be incomplete. - repeated google.rpc.Status execution_errors = 3; -} - -// The `CreateTimeSeries` request. -message CreateTimeSeriesRequest { - // The project on which to execute the request. The format is - // `"projects/{project_id_or_number}"`. - string name = 3; - - // The new data to be added to a list of time series. - // Adds at most one data point to each of several time series. The new data - // point must be more recent than any other point in its time series. Each - // `TimeSeries` value must fully specify a unique time series by supplying - // all label values for the metric and the monitored resource. - // - // The maximum number of `TimeSeries` objects per `Create` request is 200. - repeated TimeSeries time_series = 2; -} - -// DEPRECATED. Used to hold per-time-series error status. -message CreateTimeSeriesError { - // DEPRECATED. 
Time series ID that resulted in the `status` error. - TimeSeries time_series = 1 [deprecated = true]; - - // DEPRECATED. The status of the requested write operation for `time_series`. - google.rpc.Status status = 2 [deprecated = true]; -} - -// Summary of the result of a failed request to write data to a time series. -message CreateTimeSeriesSummary { - // Detailed information about an error category. - message Error { - // The status of the requested write operation. - google.rpc.Status status = 1; - - // The number of points that couldn't be written because of `status`. - int32 point_count = 2; - } - - // The number of points in the request. - int32 total_point_count = 1; - - // The number of points that were successfully written. - int32 success_point_count = 2; - - // The number of points that failed to be written. Order is not guaranteed. - repeated Error errors = 3; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/metric_service_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/metric_service_pb2.py deleted file mode 100644 index 35069366b6d1..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/metric_service_pb2.py +++ /dev/null @@ -1,1613 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring_v3/proto/metric_service.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import metric_pb2 as google_dot_api_dot_metric__pb2 -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.cloud.monitoring_v3.proto import ( - alert_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2, -) -from google.cloud.monitoring_v3.proto import ( - common_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2, -) -from google.cloud.monitoring_v3.proto import ( - metric_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/metric_service.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\022MetricServiceProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - 
'\n5google/cloud/monitoring_v3/proto/metric_service.proto\x12\x14google.monitoring.v3\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/metric.proto\x1a#google/api/monitored_resource.proto\x1a,google/cloud/monitoring_v3/proto/alert.proto\x1a-google/cloud/monitoring_v3/proto/common.proto\x1a-google/cloud/monitoring_v3/proto/metric.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x17google/rpc/status.proto\x1a\x17google/api/client.proto"n\n\'ListMonitoredResourceDescriptorsRequest\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"5\n%GetMonitoredResourceDescriptorRequest\x12\x0c\n\x04name\x18\x03 \x01(\t"c\n\x1cListMetricDescriptorsRequest\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"r\n\x1dListMetricDescriptorsResponse\x12\x38\n\x12metric_descriptors\x18\x01 \x03(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"*\n\x1aGetMetricDescriptorRequest\x12\x0c\n\x04name\x18\x03 \x01(\t"f\n\x1d\x43reateMetricDescriptorRequest\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x02 \x01(\x0b\x32\x1c.google.api.MetricDescriptor"-\n\x1d\x44\x65leteMetricDescriptorRequest\x12\x0c\n\x04name\x18\x03 \x01(\t"\xcf\x02\n\x15ListTimeSeriesRequest\x12\x0c\n\x04name\x18\n \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x34\n\x08interval\x18\x04 \x01(\x0b\x32".google.monitoring.v3.TimeInterval\x12\x36\n\x0b\x61ggregation\x18\x05 \x01(\x0b\x32!.google.monitoring.v3.Aggregation\x12\x10\n\x08order_by\x18\x06 \x01(\t\x12H\n\x04view\x18\x07 \x01(\x0e\x32:.google.monitoring.v3.ListTimeSeriesRequest.TimeSeriesView\x12\x11\n\tpage_size\x18\x08 \x01(\x05\x12\x12\n\npage_token\x18\t \x01(\t"\'\n\x0eTimeSeriesView\x12\x08\n\x04\x46ULL\x10\x00\x12\x0b\n\x07HEADERS\x10\x01"\x96\x01\n\x16ListTimeSeriesResponse\x12\x35\n\x0btime_series\x18\x01 \x03(\x0b\x32 .google.monitoring.v3.TimeSeries\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x12,\n\x10\x65xecution_errors\x18\x03 \x03(\x0b\x32\x12.google.rpc.Status"^\n\x17\x43reateTimeSeriesRequest\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x35\n\x0btime_series\x18\x02 \x03(\x0b\x32 .google.monitoring.v3.TimeSeries"z\n\x15\x43reateTimeSeriesError\x12\x39\n\x0btime_series\x18\x01 \x01(\x0b\x32 .google.monitoring.v3.TimeSeriesB\x02\x18\x01\x12&\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.StatusB\x02\x18\x01"\xd8\x01\n\x17\x43reateTimeSeriesSummary\x12\x19\n\x11total_point_count\x18\x01 \x01(\x05\x12\x1b\n\x13success_point_count\x18\x02 \x01(\x05\x12\x43\n\x06\x65rrors\x18\x03 \x03(\x0b\x32\x33.google.monitoring.v3.CreateTimeSeriesSummary.Error\x1a@\n\x05\x45rror\x12"\n\x06status\x18\x01 \x01(\x0b\x32\x12.google.rpc.Status\x12\x13\n\x0bpoint_count\x18\x02 \x01(\x05\x32\xd2\x0c\n\rMetricService\x12\xdd\x01\n 
ListMonitoredResourceDescriptors\x12=.google.monitoring.v3.ListMonitoredResourceDescriptorsRequest\x1a>.google.monitoring.v3.ListMonitoredResourceDescriptorsResponse":\x82\xd3\xe4\x93\x02\x34\x12\x32/v3/{name=projects/*}/monitoredResourceDescriptors\x12\xc4\x01\n\x1eGetMonitoredResourceDescriptor\x12;.google.monitoring.v3.GetMonitoredResourceDescriptorRequest\x1a\'.google.api.MonitoredResourceDescriptor"<\x82\xd3\xe4\x93\x02\x36\x12\x34/v3/{name=projects/*/monitoredResourceDescriptors/*}\x12\xb1\x01\n\x15ListMetricDescriptors\x12\x32.google.monitoring.v3.ListMetricDescriptorsRequest\x1a\x33.google.monitoring.v3.ListMetricDescriptorsResponse"/\x82\xd3\xe4\x93\x02)\x12\'/v3/{name=projects/*}/metricDescriptors\x12\x99\x01\n\x13GetMetricDescriptor\x12\x30.google.monitoring.v3.GetMetricDescriptorRequest\x1a\x1c.google.api.MetricDescriptor"2\x82\xd3\xe4\x93\x02,\x12*/v3/{name=projects/*/metricDescriptors/**}\x12\xaf\x01\n\x16\x43reateMetricDescriptor\x12\x33.google.monitoring.v3.CreateMetricDescriptorRequest\x1a\x1c.google.api.MetricDescriptor"B\x82\xd3\xe4\x93\x02<"\'/v3/{name=projects/*}/metricDescriptors:\x11metric_descriptor\x12\x99\x01\n\x16\x44\x65leteMetricDescriptor\x12\x33.google.monitoring.v3.DeleteMetricDescriptorRequest\x1a\x16.google.protobuf.Empty"2\x82\xd3\xe4\x93\x02,**/v3/{name=projects/*/metricDescriptors/**}\x12\x95\x01\n\x0eListTimeSeries\x12+.google.monitoring.v3.ListTimeSeriesRequest\x1a,.google.monitoring.v3.ListTimeSeriesResponse"(\x82\xd3\xe4\x93\x02"\x12 /v3/{name=projects/*}/timeSeries\x12\x86\x01\n\x10\x43reateTimeSeries\x12-.google.monitoring.v3.CreateTimeSeriesRequest\x1a\x16.google.protobuf.Empty"+\x82\xd3\xe4\x93\x02%" /v3/{name=projects/*}/timeSeries:\x01*\x1a\xda\x01\xca\x41\x19monitoring.googleapis.com\xd2\x41\xba\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.read,https://www.googleapis.com/auth/monitoring.writeB\xaa\x01\n\x18\x63om.google.monitoring.v3B\x12MetricServiceProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_metric__pb2.DESCRIPTOR, - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_alert__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_LISTTIMESERIESREQUEST_TIMESERIESVIEW = _descriptor.EnumDescriptor( - name="TimeSeriesView", - full_name="google.monitoring.v3.ListTimeSeriesRequest.TimeSeriesView", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="FULL", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="HEADERS", index=1, number=1, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1439, - serialized_end=1478, -) -_sym_db.RegisterEnumDescriptor(_LISTTIMESERIESREQUEST_TIMESERIESVIEW) - - -_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST = _descriptor.Descriptor( - name="ListMonitoredResourceDescriptorsRequest", - 
full_name="google.monitoring.v3.ListMonitoredResourceDescriptorsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.ListMonitoredResourceDescriptorsRequest.name", - index=0, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.monitoring.v3.ListMonitoredResourceDescriptorsRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.monitoring.v3.ListMonitoredResourceDescriptorsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.monitoring.v3.ListMonitoredResourceDescriptorsRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=422, - serialized_end=532, -) - - -_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE = _descriptor.Descriptor( - name="ListMonitoredResourceDescriptorsResponse", - full_name="google.monitoring.v3.ListMonitoredResourceDescriptorsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="resource_descriptors", - full_name="google.monitoring.v3.ListMonitoredResourceDescriptorsResponse.resource_descriptors", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.monitoring.v3.ListMonitoredResourceDescriptorsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=535, - serialized_end=673, -) - - -_GETMONITOREDRESOURCEDESCRIPTORREQUEST = _descriptor.Descriptor( - name="GetMonitoredResourceDescriptorRequest", - full_name="google.monitoring.v3.GetMonitoredResourceDescriptorRequest", - 
filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.GetMonitoredResourceDescriptorRequest.name", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=675, - serialized_end=728, -) - - -_LISTMETRICDESCRIPTORSREQUEST = _descriptor.Descriptor( - name="ListMetricDescriptorsRequest", - full_name="google.monitoring.v3.ListMetricDescriptorsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.ListMetricDescriptorsRequest.name", - index=0, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.monitoring.v3.ListMetricDescriptorsRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.monitoring.v3.ListMetricDescriptorsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.monitoring.v3.ListMetricDescriptorsRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=730, - serialized_end=829, -) - - -_LISTMETRICDESCRIPTORSRESPONSE = _descriptor.Descriptor( - name="ListMetricDescriptorsResponse", - full_name="google.monitoring.v3.ListMetricDescriptorsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metric_descriptors", - full_name="google.monitoring.v3.ListMetricDescriptorsResponse.metric_descriptors", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.monitoring.v3.ListMetricDescriptorsResponse.next_page_token", - index=1, - 
number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=831, - serialized_end=945, -) - - -_GETMETRICDESCRIPTORREQUEST = _descriptor.Descriptor( - name="GetMetricDescriptorRequest", - full_name="google.monitoring.v3.GetMetricDescriptorRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.GetMetricDescriptorRequest.name", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=947, - serialized_end=989, -) - - -_CREATEMETRICDESCRIPTORREQUEST = _descriptor.Descriptor( - name="CreateMetricDescriptorRequest", - full_name="google.monitoring.v3.CreateMetricDescriptorRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.CreateMetricDescriptorRequest.name", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric_descriptor", - full_name="google.monitoring.v3.CreateMetricDescriptorRequest.metric_descriptor", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=991, - serialized_end=1093, -) - - -_DELETEMETRICDESCRIPTORREQUEST = _descriptor.Descriptor( - name="DeleteMetricDescriptorRequest", - full_name="google.monitoring.v3.DeleteMetricDescriptorRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.DeleteMetricDescriptorRequest.name", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1095, - serialized_end=1140, -) - - -_LISTTIMESERIESREQUEST = _descriptor.Descriptor( - name="ListTimeSeriesRequest", - 
full_name="google.monitoring.v3.ListTimeSeriesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.ListTimeSeriesRequest.name", - index=0, - number=10, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.monitoring.v3.ListTimeSeriesRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="interval", - full_name="google.monitoring.v3.ListTimeSeriesRequest.interval", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="aggregation", - full_name="google.monitoring.v3.ListTimeSeriesRequest.aggregation", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="order_by", - full_name="google.monitoring.v3.ListTimeSeriesRequest.order_by", - index=4, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="view", - full_name="google.monitoring.v3.ListTimeSeriesRequest.view", - index=5, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.monitoring.v3.ListTimeSeriesRequest.page_size", - index=6, - number=8, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.monitoring.v3.ListTimeSeriesRequest.page_token", - index=7, - number=9, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_LISTTIMESERIESREQUEST_TIMESERIESVIEW], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1143, - serialized_end=1478, -) - - -_LISTTIMESERIESRESPONSE = 
_descriptor.Descriptor( - name="ListTimeSeriesResponse", - full_name="google.monitoring.v3.ListTimeSeriesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="time_series", - full_name="google.monitoring.v3.ListTimeSeriesResponse.time_series", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.monitoring.v3.ListTimeSeriesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="execution_errors", - full_name="google.monitoring.v3.ListTimeSeriesResponse.execution_errors", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1481, - serialized_end=1631, -) - - -_CREATETIMESERIESREQUEST = _descriptor.Descriptor( - name="CreateTimeSeriesRequest", - full_name="google.monitoring.v3.CreateTimeSeriesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.CreateTimeSeriesRequest.name", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="time_series", - full_name="google.monitoring.v3.CreateTimeSeriesRequest.time_series", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1633, - serialized_end=1727, -) - - -_CREATETIMESERIESERROR = _descriptor.Descriptor( - name="CreateTimeSeriesError", - full_name="google.monitoring.v3.CreateTimeSeriesError", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="time_series", - full_name="google.monitoring.v3.CreateTimeSeriesError.time_series", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status", - 
full_name="google.monitoring.v3.CreateTimeSeriesError.status", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1729, - serialized_end=1851, -) - - -_CREATETIMESERIESSUMMARY_ERROR = _descriptor.Descriptor( - name="Error", - full_name="google.monitoring.v3.CreateTimeSeriesSummary.Error", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="status", - full_name="google.monitoring.v3.CreateTimeSeriesSummary.Error.status", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="point_count", - full_name="google.monitoring.v3.CreateTimeSeriesSummary.Error.point_count", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2006, - serialized_end=2070, -) - -_CREATETIMESERIESSUMMARY = _descriptor.Descriptor( - name="CreateTimeSeriesSummary", - full_name="google.monitoring.v3.CreateTimeSeriesSummary", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="total_point_count", - full_name="google.monitoring.v3.CreateTimeSeriesSummary.total_point_count", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="success_point_count", - full_name="google.monitoring.v3.CreateTimeSeriesSummary.success_point_count", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="errors", - full_name="google.monitoring.v3.CreateTimeSeriesSummary.errors", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_CREATETIMESERIESSUMMARY_ERROR], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1854, - serialized_end=2070, -) - -_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE.fields_by_name[ - "resource_descriptors" -].message_type = ( - 
google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEDESCRIPTOR -) -_LISTMETRICDESCRIPTORSRESPONSE.fields_by_name[ - "metric_descriptors" -].message_type = google_dot_api_dot_metric__pb2._METRICDESCRIPTOR -_CREATEMETRICDESCRIPTORREQUEST.fields_by_name[ - "metric_descriptor" -].message_type = google_dot_api_dot_metric__pb2._METRICDESCRIPTOR -_LISTTIMESERIESREQUEST.fields_by_name[ - "interval" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2._TIMEINTERVAL -) -_LISTTIMESERIESREQUEST.fields_by_name[ - "aggregation" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2._AGGREGATION -) -_LISTTIMESERIESREQUEST.fields_by_name[ - "view" -].enum_type = _LISTTIMESERIESREQUEST_TIMESERIESVIEW -_LISTTIMESERIESREQUEST_TIMESERIESVIEW.containing_type = _LISTTIMESERIESREQUEST -_LISTTIMESERIESRESPONSE.fields_by_name[ - "time_series" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__pb2._TIMESERIES -) -_LISTTIMESERIESRESPONSE.fields_by_name[ - "execution_errors" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_CREATETIMESERIESREQUEST.fields_by_name[ - "time_series" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__pb2._TIMESERIES -) -_CREATETIMESERIESERROR.fields_by_name[ - "time_series" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__pb2._TIMESERIES -) -_CREATETIMESERIESERROR.fields_by_name[ - "status" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_CREATETIMESERIESSUMMARY_ERROR.fields_by_name[ - "status" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_CREATETIMESERIESSUMMARY_ERROR.containing_type = _CREATETIMESERIESSUMMARY -_CREATETIMESERIESSUMMARY.fields_by_name[ - "errors" -].message_type = _CREATETIMESERIESSUMMARY_ERROR -DESCRIPTOR.message_types_by_name[ - "ListMonitoredResourceDescriptorsRequest" -] = _LISTMONITOREDRESOURCEDESCRIPTORSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListMonitoredResourceDescriptorsResponse" -] = _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE -DESCRIPTOR.message_types_by_name[ - "GetMonitoredResourceDescriptorRequest" -] = _GETMONITOREDRESOURCEDESCRIPTORREQUEST -DESCRIPTOR.message_types_by_name[ - "ListMetricDescriptorsRequest" -] = _LISTMETRICDESCRIPTORSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListMetricDescriptorsResponse" -] = _LISTMETRICDESCRIPTORSRESPONSE -DESCRIPTOR.message_types_by_name[ - "GetMetricDescriptorRequest" -] = _GETMETRICDESCRIPTORREQUEST -DESCRIPTOR.message_types_by_name[ - "CreateMetricDescriptorRequest" -] = _CREATEMETRICDESCRIPTORREQUEST -DESCRIPTOR.message_types_by_name[ - "DeleteMetricDescriptorRequest" -] = _DELETEMETRICDESCRIPTORREQUEST -DESCRIPTOR.message_types_by_name["ListTimeSeriesRequest"] = _LISTTIMESERIESREQUEST -DESCRIPTOR.message_types_by_name["ListTimeSeriesResponse"] = _LISTTIMESERIESRESPONSE -DESCRIPTOR.message_types_by_name["CreateTimeSeriesRequest"] = _CREATETIMESERIESREQUEST -DESCRIPTOR.message_types_by_name["CreateTimeSeriesError"] = _CREATETIMESERIESERROR -DESCRIPTOR.message_types_by_name["CreateTimeSeriesSummary"] = _CREATETIMESERIESSUMMARY -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ListMonitoredResourceDescriptorsRequest = _reflection.GeneratedProtocolMessageType( - "ListMonitoredResourceDescriptorsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, - __module__="google.cloud.monitoring_v3.proto.metric_service_pb2", - __doc__="""The ``ListMonitoredResourceDescriptors`` request. 
- - - Attributes: - name: - The project on which to execute the request. The format is - ``"projects/{project_id_or_number}"``. - filter: - An optional `filter `__ describing - the descriptors to be returned. The filter can reference the - descriptor's type and labels. For example, the following - filter returns only Google Compute Engine descriptors that - have an ``id`` label:: - - resource.type = starts_with("gce_") AND resource.label:id - - page_size: - A positive number that is the maximum number of results to - return. - page_token: - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return - additional results from the previous method call. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListMonitoredResourceDescriptorsRequest) - ), -) -_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsRequest) - -ListMonitoredResourceDescriptorsResponse = _reflection.GeneratedProtocolMessageType( - "ListMonitoredResourceDescriptorsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, - __module__="google.cloud.monitoring_v3.proto.metric_service_pb2", - __doc__="""The ``ListMonitoredResourceDescriptors`` response. - - - Attributes: - resource_descriptors: - The monitored resource descriptors that are available to this - project and that match ``filter``, if present. - next_page_token: - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``pageToken`` in the next call to - this method. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListMonitoredResourceDescriptorsResponse) - ), -) -_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsResponse) - -GetMonitoredResourceDescriptorRequest = _reflection.GeneratedProtocolMessageType( - "GetMonitoredResourceDescriptorRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETMONITOREDRESOURCEDESCRIPTORREQUEST, - __module__="google.cloud.monitoring_v3.proto.metric_service_pb2", - __doc__="""The ``GetMonitoredResourceDescriptor`` request. - - - Attributes: - name: - The monitored resource descriptor to get. The format is ``"pro - jects/{project_id_or_number}/monitoredResourceDescriptors/{res - ource_type}"``. The ``{resource_type}`` is a predefined type, - such as ``cloudsql_database``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.GetMonitoredResourceDescriptorRequest) - ), -) -_sym_db.RegisterMessage(GetMonitoredResourceDescriptorRequest) - -ListMetricDescriptorsRequest = _reflection.GeneratedProtocolMessageType( - "ListMetricDescriptorsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTMETRICDESCRIPTORSREQUEST, - __module__="google.cloud.monitoring_v3.proto.metric_service_pb2", - __doc__="""The ``ListMetricDescriptors`` request. - - - Attributes: - name: - The project on which to execute the request. The format is - ``"projects/{project_id_or_number}"``. - filter: - If this field is empty, all custom and system-defined metric - descriptors are returned. Otherwise, the `filter - `__ specifies which metric - descriptors are to be returned. For example, the following - filter matches all `custom metrics `__: :: metric.type = - starts_with("custom.googleapis.com/") - page_size: - A positive number that is the maximum number of results to - return. 
- page_token: - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return - additional results from the previous method call. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListMetricDescriptorsRequest) - ), -) -_sym_db.RegisterMessage(ListMetricDescriptorsRequest) - -ListMetricDescriptorsResponse = _reflection.GeneratedProtocolMessageType( - "ListMetricDescriptorsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTMETRICDESCRIPTORSRESPONSE, - __module__="google.cloud.monitoring_v3.proto.metric_service_pb2", - __doc__="""The ``ListMetricDescriptors`` response. - - - Attributes: - metric_descriptors: - The metric descriptors that are available to the project and - that match the value of ``filter``, if present. - next_page_token: - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``pageToken`` in the next call to - this method. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListMetricDescriptorsResponse) - ), -) -_sym_db.RegisterMessage(ListMetricDescriptorsResponse) - -GetMetricDescriptorRequest = _reflection.GeneratedProtocolMessageType( - "GetMetricDescriptorRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETMETRICDESCRIPTORREQUEST, - __module__="google.cloud.monitoring_v3.proto.metric_service_pb2", - __doc__="""The ``GetMetricDescriptor`` request. - - - Attributes: - name: - The metric descriptor on which to execute the request. The - format is ``"projects/{project_id_or_number}/metricDescriptors - /{metric_id}"``. An example value of ``{metric_id}`` is - ``"compute.googleapis.com/instance/disk/read_bytes_count"``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.GetMetricDescriptorRequest) - ), -) -_sym_db.RegisterMessage(GetMetricDescriptorRequest) - -CreateMetricDescriptorRequest = _reflection.GeneratedProtocolMessageType( - "CreateMetricDescriptorRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATEMETRICDESCRIPTORREQUEST, - __module__="google.cloud.monitoring_v3.proto.metric_service_pb2", - __doc__="""The ``CreateMetricDescriptor`` request. - - - Attributes: - name: - The project on which to execute the request. The format is - ``"projects/{project_id_or_number}"``. - metric_descriptor: - The new `custom metric `__ - descriptor. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateMetricDescriptorRequest) - ), -) -_sym_db.RegisterMessage(CreateMetricDescriptorRequest) - -DeleteMetricDescriptorRequest = _reflection.GeneratedProtocolMessageType( - "DeleteMetricDescriptorRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEMETRICDESCRIPTORREQUEST, - __module__="google.cloud.monitoring_v3.proto.metric_service_pb2", - __doc__="""The ``DeleteMetricDescriptor`` request. - - - Attributes: - name: - The metric descriptor on which to execute the request. The - format is ``"projects/{project_id_or_number}/metricDescriptors - /{metric_id}"``. An example of ``{metric_id}`` is: - ``"custom.googleapis.com/my_test_metric"``. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.DeleteMetricDescriptorRequest) - ), -) -_sym_db.RegisterMessage(DeleteMetricDescriptorRequest) - -ListTimeSeriesRequest = _reflection.GeneratedProtocolMessageType( - "ListTimeSeriesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTTIMESERIESREQUEST, - __module__="google.cloud.monitoring_v3.proto.metric_service_pb2", - __doc__="""The ``ListTimeSeries`` request. - - - Attributes: - name: - The project on which to execute the request. The format is - "projects/{project\_id\_or\_number}". - filter: - A `monitoring filter `__ that - specifies which time series should be returned. The filter - must specify a single metric type, and can additionally - specify metric labels and other information. For example: :: - metric.type = "compute.googleapis.com/instance/cpu/usage_time" - AND metric.labels.instance_name = "my-instance-name" - interval: - The time interval for which results should be returned. Only - time series that contain data points in the specified interval - are included in the response. - aggregation: - Specifies the alignment of data points in individual time - series as well as how to combine the retrieved time series - across specified labels. By default (if no ``aggregation`` is - explicitly specified), the raw time series data is returned. - order_by: - Unsupported: must be left blank. The points in each time - series are currently returned in reverse time order (most - recent to oldest). - view: - Specifies which information is returned about the time series. - page_size: - A positive number that is the maximum number of results to - return. If ``page_size`` is empty or more than 100,000 - results, the effective ``page_size`` is 100,000 results. If - ``view`` is set to ``FULL``, this is the maximum number of - ``Points`` returned. If ``view`` is set to ``HEADERS``, this - is the maximum number of ``TimeSeries`` returned. - page_token: - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return - additional results from the previous method call. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListTimeSeriesRequest) - ), -) -_sym_db.RegisterMessage(ListTimeSeriesRequest) - -ListTimeSeriesResponse = _reflection.GeneratedProtocolMessageType( - "ListTimeSeriesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTTIMESERIESRESPONSE, - __module__="google.cloud.monitoring_v3.proto.metric_service_pb2", - __doc__="""The ``ListTimeSeries`` response. - - - Attributes: - time_series: - One or more time series that match the filter included in the - request. - next_page_token: - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``pageToken`` in the next call to - this method. - execution_errors: - Query execution errors that may have caused the time series - data returned to be incomplete. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListTimeSeriesResponse) - ), -) -_sym_db.RegisterMessage(ListTimeSeriesResponse) - -CreateTimeSeriesRequest = _reflection.GeneratedProtocolMessageType( - "CreateTimeSeriesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATETIMESERIESREQUEST, - __module__="google.cloud.monitoring_v3.proto.metric_service_pb2", - __doc__="""The ``CreateTimeSeries`` request. 
- - - Attributes: - name: - The project on which to execute the request. The format is - ``"projects/{project_id_or_number}"``. - time_series: - The new data to be added to a list of time series. Adds at - most one data point to each of several time series. The new - data point must be more recent than any other point in its - time series. Each ``TimeSeries`` value must fully specify a - unique time series by supplying all label values for the - metric and the monitored resource. The maximum number of - ``TimeSeries`` objects per ``Create`` request is 200. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateTimeSeriesRequest) - ), -) -_sym_db.RegisterMessage(CreateTimeSeriesRequest) - -CreateTimeSeriesError = _reflection.GeneratedProtocolMessageType( - "CreateTimeSeriesError", - (_message.Message,), - dict( - DESCRIPTOR=_CREATETIMESERIESERROR, - __module__="google.cloud.monitoring_v3.proto.metric_service_pb2", - __doc__="""DEPRECATED. Used to hold per-time-series error status. - - - Attributes: - time_series: - DEPRECATED. Time series ID that resulted in the ``status`` - error. - status: - DEPRECATED. The status of the requested write operation for - ``time_series``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateTimeSeriesError) - ), -) -_sym_db.RegisterMessage(CreateTimeSeriesError) - -CreateTimeSeriesSummary = _reflection.GeneratedProtocolMessageType( - "CreateTimeSeriesSummary", - (_message.Message,), - dict( - Error=_reflection.GeneratedProtocolMessageType( - "Error", - (_message.Message,), - dict( - DESCRIPTOR=_CREATETIMESERIESSUMMARY_ERROR, - __module__="google.cloud.monitoring_v3.proto.metric_service_pb2", - __doc__="""Detailed information about an error category. - - - Attributes: - status: - The status of the requested write operation. - point_count: - The number of points that couldn't be written because of - ``status``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateTimeSeriesSummary.Error) - ), - ), - DESCRIPTOR=_CREATETIMESERIESSUMMARY, - __module__="google.cloud.monitoring_v3.proto.metric_service_pb2", - __doc__="""Summary of the result of a failed request to write data to - a time series. - - - Attributes: - total_point_count: - The number of points in the request. - success_point_count: - The number of points that were successfully written. - errors: - The number of points that failed to be written. Order is not - guaranteed. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateTimeSeriesSummary) - ), -) -_sym_db.RegisterMessage(CreateTimeSeriesSummary) -_sym_db.RegisterMessage(CreateTimeSeriesSummary.Error) - - -DESCRIPTOR._options = None -_CREATETIMESERIESERROR.fields_by_name["time_series"]._options = None -_CREATETIMESERIESERROR.fields_by_name["status"]._options = None - -_METRICSERVICE = _descriptor.ServiceDescriptor( - name="MetricService", - full_name="google.monitoring.v3.MetricService", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\031monitoring.googleapis.com\322A\272\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.read,https://www.googleapis.com/auth/monitoring.write" - ), - serialized_start=2073, - serialized_end=3691, - methods=[ - _descriptor.MethodDescriptor( - name="ListMonitoredResourceDescriptors", - full_name="google.monitoring.v3.MetricService.ListMonitoredResourceDescriptors", - index=0, - containing_service=None, - input_type=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, - output_type=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, - serialized_options=_b( - "\202\323\344\223\0024\0222/v3/{name=projects/*}/monitoredResourceDescriptors" - ), - ), - _descriptor.MethodDescriptor( - name="GetMonitoredResourceDescriptor", - full_name="google.monitoring.v3.MetricService.GetMonitoredResourceDescriptor", - index=1, - containing_service=None, - input_type=_GETMONITOREDRESOURCEDESCRIPTORREQUEST, - output_type=google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEDESCRIPTOR, - serialized_options=_b( - "\202\323\344\223\0026\0224/v3/{name=projects/*/monitoredResourceDescriptors/*}" - ), - ), - _descriptor.MethodDescriptor( - name="ListMetricDescriptors", - full_name="google.monitoring.v3.MetricService.ListMetricDescriptors", - index=2, - containing_service=None, - input_type=_LISTMETRICDESCRIPTORSREQUEST, - output_type=_LISTMETRICDESCRIPTORSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002)\022'/v3/{name=projects/*}/metricDescriptors" - ), - ), - _descriptor.MethodDescriptor( - name="GetMetricDescriptor", - full_name="google.monitoring.v3.MetricService.GetMetricDescriptor", - index=3, - containing_service=None, - input_type=_GETMETRICDESCRIPTORREQUEST, - output_type=google_dot_api_dot_metric__pb2._METRICDESCRIPTOR, - serialized_options=_b( - "\202\323\344\223\002,\022*/v3/{name=projects/*/metricDescriptors/**}" - ), - ), - _descriptor.MethodDescriptor( - name="CreateMetricDescriptor", - full_name="google.monitoring.v3.MetricService.CreateMetricDescriptor", - index=4, - containing_service=None, - input_type=_CREATEMETRICDESCRIPTORREQUEST, - output_type=google_dot_api_dot_metric__pb2._METRICDESCRIPTOR, - serialized_options=_b( - "\202\323\344\223\002<\"'/v3/{name=projects/*}/metricDescriptors:\021metric_descriptor" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteMetricDescriptor", - full_name="google.monitoring.v3.MetricService.DeleteMetricDescriptor", - index=5, - containing_service=None, - input_type=_DELETEMETRICDESCRIPTORREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002,**/v3/{name=projects/*/metricDescriptors/**}" - ), - ), - _descriptor.MethodDescriptor( - name="ListTimeSeries", - full_name="google.monitoring.v3.MetricService.ListTimeSeries", - index=6, - containing_service=None, - input_type=_LISTTIMESERIESREQUEST, - output_type=_LISTTIMESERIESRESPONSE, - serialized_options=_b( - 
'\202\323\344\223\002"\022 /v3/{name=projects/*}/timeSeries' - ), - ), - _descriptor.MethodDescriptor( - name="CreateTimeSeries", - full_name="google.monitoring.v3.MetricService.CreateTimeSeries", - index=7, - containing_service=None, - input_type=_CREATETIMESERIESREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - '\202\323\344\223\002%" /v3/{name=projects/*}/timeSeries:\001*' - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_METRICSERVICE) - -DESCRIPTOR.services_by_name["MetricService"] = _METRICSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/metric_service_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/metric_service_pb2_grpc.py deleted file mode 100644 index 581a79532e7e..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/metric_service_pb2_grpc.py +++ /dev/null @@ -1,181 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.api import metric_pb2 as google_dot_api_dot_metric__pb2 -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.cloud.monitoring_v3.proto import ( - metric_service_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class MetricServiceStub(object): - """Manages metric descriptors, monitored resource descriptors, and - time series data. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.ListMonitoredResourceDescriptors = channel.unary_unary( - "/google.monitoring.v3.MetricService/ListMonitoredResourceDescriptors", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.ListMonitoredResourceDescriptorsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.ListMonitoredResourceDescriptorsResponse.FromString, - ) - self.GetMonitoredResourceDescriptor = channel.unary_unary( - "/google.monitoring.v3.MetricService/GetMonitoredResourceDescriptor", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.GetMonitoredResourceDescriptorRequest.SerializeToString, - response_deserializer=google_dot_api_dot_monitored__resource__pb2.MonitoredResourceDescriptor.FromString, - ) - self.ListMetricDescriptors = channel.unary_unary( - "/google.monitoring.v3.MetricService/ListMetricDescriptors", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.ListMetricDescriptorsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.ListMetricDescriptorsResponse.FromString, - ) - self.GetMetricDescriptor = channel.unary_unary( - "/google.monitoring.v3.MetricService/GetMetricDescriptor", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.GetMetricDescriptorRequest.SerializeToString, - response_deserializer=google_dot_api_dot_metric__pb2.MetricDescriptor.FromString, - ) - self.CreateMetricDescriptor = channel.unary_unary( - "/google.monitoring.v3.MetricService/CreateMetricDescriptor", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.CreateMetricDescriptorRequest.SerializeToString, - response_deserializer=google_dot_api_dot_metric__pb2.MetricDescriptor.FromString, - ) - 
self.DeleteMetricDescriptor = channel.unary_unary( - "/google.monitoring.v3.MetricService/DeleteMetricDescriptor", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.DeleteMetricDescriptorRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ListTimeSeries = channel.unary_unary( - "/google.monitoring.v3.MetricService/ListTimeSeries", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.ListTimeSeriesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.ListTimeSeriesResponse.FromString, - ) - self.CreateTimeSeries = channel.unary_unary( - "/google.monitoring.v3.MetricService/CreateTimeSeries", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.CreateTimeSeriesRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - -class MetricServiceServicer(object): - """Manages metric descriptors, monitored resource descriptors, and - time series data. - """ - - def ListMonitoredResourceDescriptors(self, request, context): - """Lists monitored resource descriptors that match a filter. This method does not require a Stackdriver account. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetMonitoredResourceDescriptor(self, request, context): - """Gets a single monitored resource descriptor. This method does not require a Stackdriver account. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListMetricDescriptors(self, request, context): - """Lists metric descriptors that match a filter. This method does not require a Stackdriver account. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetMetricDescriptor(self, request, context): - """Gets a single metric descriptor. This method does not require a Stackdriver account. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateMetricDescriptor(self, request, context): - """Creates a new metric descriptor. - User-created metric descriptors define - [custom metrics](/monitoring/custom-metrics). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteMetricDescriptor(self, request, context): - """Deletes a metric descriptor. Only user-created - [custom metrics](/monitoring/custom-metrics) can be deleted. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTimeSeries(self, request, context): - """Lists time series that match a filter. This method does not require a Stackdriver account. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateTimeSeries(self, request, context): - """Creates or adds data to one or more time series. 
- The response is empty if all time series in the request were written. - If any time series could not be written, a corresponding failure message is - included in the error response. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_MetricServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "ListMonitoredResourceDescriptors": grpc.unary_unary_rpc_method_handler( - servicer.ListMonitoredResourceDescriptors, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.ListMonitoredResourceDescriptorsRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.ListMonitoredResourceDescriptorsResponse.SerializeToString, - ), - "GetMonitoredResourceDescriptor": grpc.unary_unary_rpc_method_handler( - servicer.GetMonitoredResourceDescriptor, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.GetMonitoredResourceDescriptorRequest.FromString, - response_serializer=google_dot_api_dot_monitored__resource__pb2.MonitoredResourceDescriptor.SerializeToString, - ), - "ListMetricDescriptors": grpc.unary_unary_rpc_method_handler( - servicer.ListMetricDescriptors, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.ListMetricDescriptorsRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.ListMetricDescriptorsResponse.SerializeToString, - ), - "GetMetricDescriptor": grpc.unary_unary_rpc_method_handler( - servicer.GetMetricDescriptor, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.GetMetricDescriptorRequest.FromString, - response_serializer=google_dot_api_dot_metric__pb2.MetricDescriptor.SerializeToString, - ), - "CreateMetricDescriptor": grpc.unary_unary_rpc_method_handler( - servicer.CreateMetricDescriptor, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.CreateMetricDescriptorRequest.FromString, - response_serializer=google_dot_api_dot_metric__pb2.MetricDescriptor.SerializeToString, - ), - "DeleteMetricDescriptor": grpc.unary_unary_rpc_method_handler( - servicer.DeleteMetricDescriptor, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.DeleteMetricDescriptorRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "ListTimeSeries": grpc.unary_unary_rpc_method_handler( - servicer.ListTimeSeries, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.ListTimeSeriesRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.ListTimeSeriesResponse.SerializeToString, - ), - "CreateTimeSeries": grpc.unary_unary_rpc_method_handler( - servicer.CreateTimeSeries, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_metric__service__pb2.CreateTimeSeriesRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.monitoring.v3.MetricService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/monitoring/google/cloud/monitoring_v3/proto/mutation_record.proto 
b/monitoring/google/cloud/monitoring_v3/proto/mutation_record.proto deleted file mode 100644 index eab1f37d2e9a..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/mutation_record.proto +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.monitoring.v3; - -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "MutationRecordProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// Describes a change made to a configuration. -message MutationRecord { - // When the change occurred. - google.protobuf.Timestamp mutate_time = 1; - - // The email address of the user making the change. - string mutated_by = 2; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/mutation_record_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/mutation_record_pb2.py deleted file mode 100644 index d3658ee2489e..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/mutation_record_pb2.py +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring_v3/proto/mutation_record.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/mutation_record.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\023MutationRecordProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n6google/cloud/monitoring_v3/proto/mutation_record.proto\x12\x14google.monitoring.v3\x1a\x1fgoogle/protobuf/timestamp.proto"U\n\x0eMutationRecord\x12/\n\x0bmutate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x12\n\nmutated_by\x18\x02 \x01(\tB\xab\x01\n\x18\x63om.google.monitoring.v3B\x13MutationRecordProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR], -) - - -_MUTATIONRECORD = _descriptor.Descriptor( - name="MutationRecord", - full_name="google.monitoring.v3.MutationRecord", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="mutate_time", - full_name="google.monitoring.v3.MutationRecord.mutate_time", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="mutated_by", - full_name="google.monitoring.v3.MutationRecord.mutated_by", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=113, - serialized_end=198, -) - -_MUTATIONRECORD.fields_by_name[ - "mutate_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -DESCRIPTOR.message_types_by_name["MutationRecord"] = _MUTATIONRECORD -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -MutationRecord = _reflection.GeneratedProtocolMessageType( - "MutationRecord", - (_message.Message,), - dict( - DESCRIPTOR=_MUTATIONRECORD, - __module__="google.cloud.monitoring_v3.proto.mutation_record_pb2", - __doc__="""Describes a change made to a configuration. - - - Attributes: - mutate_time: - When the change occurred. - mutated_by: - The email address of the user making the change. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.MutationRecord) - ), -) -_sym_db.RegisterMessage(MutationRecord) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/mutation_record_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/mutation_record_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/mutation_record_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/monitoring/google/cloud/monitoring_v3/proto/notification.proto b/monitoring/google/cloud/monitoring_v3/proto/notification.proto deleted file mode 100644 index 939ca2677187..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/notification.proto +++ /dev/null @@ -1,163 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.monitoring.v3; - -import "google/api/label.proto"; -import "google/api/launch_stage.proto"; -import "google/monitoring/v3/common.proto"; -import "google/protobuf/wrappers.proto"; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "NotificationProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// A description of a notification channel. The descriptor includes -// the properties of the channel and the set of labels or fields that -// must be specified to configure channels of a given type. -message NotificationChannelDescriptor { - // The full REST resource name for this descriptor. The syntax is: - // - // projects/[PROJECT_ID]/notificationChannelDescriptors/[TYPE] - // - // In the above, `[TYPE]` is the value of the `type` field. - string name = 6; - - // The type of notification channel, such as "email", "sms", etc. - // Notification channel types are globally unique. - string type = 1; - - // A human-readable name for the notification channel type. This - // form of the name is suitable for a user interface. - string display_name = 2; - - // A human-readable description of the notification channel - // type. The description may include a description of the properties - // of the channel and pointers to external documentation. - string description = 3; - - // The set of labels that must be defined to identify a particular - // channel of the corresponding type. Each label includes a - // description for how that field should be populated. - repeated google.api.LabelDescriptor labels = 4; - - // The tiers that support this notification channel; the project service tier - // must be one of the supported_tiers. 
- repeated ServiceTier supported_tiers = 5 [deprecated = true]; -} - -// A `NotificationChannel` is a medium through which an alert is -// delivered when a policy violation is detected. Examples of channels -// include email, SMS, and third-party messaging applications. Fields -// containing sensitive information like authentication tokens or -// contact info are only partially populated on retrieval. -message NotificationChannel { - // Indicates whether the channel has been verified or not. It is illegal - // to specify this field in a - // [`CreateNotificationChannel`][google.monitoring.v3.NotificationChannelService.CreateNotificationChannel] - // or an - // [`UpdateNotificationChannel`][google.monitoring.v3.NotificationChannelService.UpdateNotificationChannel] - // operation. - enum VerificationStatus { - // Sentinel value used to indicate that the state is unknown, omitted, or - // is not applicable (as in the case of channels that neither support - // nor require verification in order to function). - VERIFICATION_STATUS_UNSPECIFIED = 0; - - // The channel has yet to be verified and requires verification to function. - // Note that this state also applies to the case where the verification - // process has been initiated by sending a verification code but where - // the verification code has not been submitted to complete the process. - UNVERIFIED = 1; - - // It has been proven that notifications can be received on this - // notification channel and that someone on the project has access - // to messages that are delivered to that channel. - VERIFIED = 2; - } - - // The type of the notification channel. This field matches the - // value of the [NotificationChannelDescriptor.type][google.monitoring.v3.NotificationChannelDescriptor.type] field. - string type = 1; - - // The full REST resource name for this channel. The syntax is: - // - // projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID] - // - // The `[CHANNEL_ID]` is automatically assigned by the server on creation. - string name = 6; - - // An optional human-readable name for this notification channel. It is - // recommended that you specify a non-empty and unique name in order to - // make it easier to identify the channels in your project, though this is - // not enforced. The display name is limited to 512 Unicode characters. - string display_name = 3; - - // An optional human-readable description of this notification channel. This - // description may provide additional details, beyond the display - // name, for the channel. This may not exceed 1024 Unicode characters. - string description = 4; - - // Configuration fields that define the channel and its behavior. The - // permissible and required labels are specified in the - // [NotificationChannelDescriptor.labels][google.monitoring.v3.NotificationChannelDescriptor.labels] of the - // `NotificationChannelDescriptor` corresponding to the `type` field. - map labels = 5; - - // User-supplied key/value data that does not need to conform to - // the corresponding `NotificationChannelDescriptor`'s schema, unlike - // the `labels` field. This field is intended to be used for organizing - // and identifying the `NotificationChannel` objects. - // - // The field can contain up to 64 entries. Each key and value is limited to - // 63 Unicode characters or 128 bytes, whichever is smaller. Labels and - // values can contain only lowercase letters, numerals, underscores, and - // dashes. Keys must begin with a letter. 
- map user_labels = 8; - - // Indicates whether this channel has been verified or not. On a - // [`ListNotificationChannels`][google.monitoring.v3.NotificationChannelService.ListNotificationChannels] - // or - // [`GetNotificationChannel`][google.monitoring.v3.NotificationChannelService.GetNotificationChannel] - // operation, this field is expected to be populated. - // - // If the value is `UNVERIFIED`, then it indicates that the channel is - // non-functioning (it both requires verification and lacks verification); - // otherwise, it is assumed that the channel works. - // - // If the channel is neither `VERIFIED` nor `UNVERIFIED`, it implies that - // the channel is of a type that does not require verification or that - // this specific channel has been exempted from verification because it was - // created prior to verification being required for channels of this type. - // - // This field cannot be modified using a standard - // [`UpdateNotificationChannel`][google.monitoring.v3.NotificationChannelService.UpdateNotificationChannel] - // operation. To change the value of this field, you must call - // [`VerifyNotificationChannel`][google.monitoring.v3.NotificationChannelService.VerifyNotificationChannel]. - VerificationStatus verification_status = 9; - - // Whether notifications are forwarded to the described channel. This makes - // it possible to disable delivery of notifications to a particular channel - // without removing the channel from all alerting policies that reference - // the channel. This is a more convenient approach when the change is - // temporary and you want to receive notifications from the same set - // of alerting policies on the channel at some point in the future. - google.protobuf.BoolValue enabled = 11; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/notification_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/notification_pb2.py deleted file mode 100644 index df10c8293d9e..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/notification_pb2.py +++ /dev/null @@ -1,659 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring_v3/proto/notification.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import label_pb2 as google_dot_api_dot_label__pb2 -from google.api import launch_stage_pb2 as google_dot_api_dot_launch__stage__pb2 -from google.cloud.monitoring_v3.proto import ( - common_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2, -) -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/notification.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\021NotificationProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n3google/cloud/monitoring_v3/proto/notification.proto\x12\x14google.monitoring.v3\x1a\x16google/api/label.proto\x1a\x1dgoogle/api/launch_stage.proto\x1a-google/cloud/monitoring_v3/proto/common.proto\x1a\x1egoogle/protobuf/wrappers.proto"\xd3\x01\n\x1dNotificationChannelDescriptor\x12\x0c\n\x04name\x18\x06 \x01(\t\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12+\n\x06labels\x18\x04 \x03(\x0b\x32\x1b.google.api.LabelDescriptor\x12>\n\x0fsupported_tiers\x18\x05 \x03(\x0e\x32!.google.monitoring.v3.ServiceTierB\x02\x18\x01"\xb6\x04\n\x13NotificationChannel\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x06 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x45\n\x06labels\x18\x05 \x03(\x0b\x32\x35.google.monitoring.v3.NotificationChannel.LabelsEntry\x12N\n\x0buser_labels\x18\x08 \x03(\x0b\x32\x39.google.monitoring.v3.NotificationChannel.UserLabelsEntry\x12Y\n\x13verification_status\x18\t \x01(\x0e\x32<.google.monitoring.v3.NotificationChannel.VerificationStatus\x12+\n\x07\x65nabled\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fUserLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"W\n\x12VerificationStatus\x12#\n\x1fVERIFICATION_STATUS_UNSPECIFIED\x10\x00\x12\x0e\n\nUNVERIFIED\x10\x01\x12\x0c\n\x08VERIFIED\x10\x02\x42\xa9\x01\n\x18\x63om.google.monitoring.v3B\x11NotificationProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_label__pb2.DESCRIPTOR, - google_dot_api_dot_launch__stage__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2.DESCRIPTOR, - google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, - ], -) - - -_NOTIFICATIONCHANNEL_VERIFICATIONSTATUS = _descriptor.EnumDescriptor( - name="VerificationStatus", - full_name="google.monitoring.v3.NotificationChannel.VerificationStatus", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - 
name="VERIFICATION_STATUS_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="UNVERIFIED", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="VERIFIED", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=905, - serialized_end=992, -) -_sym_db.RegisterEnumDescriptor(_NOTIFICATIONCHANNEL_VERIFICATIONSTATUS) - - -_NOTIFICATIONCHANNELDESCRIPTOR = _descriptor.Descriptor( - name="NotificationChannelDescriptor", - full_name="google.monitoring.v3.NotificationChannelDescriptor", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.NotificationChannelDescriptor.name", - index=0, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="type", - full_name="google.monitoring.v3.NotificationChannelDescriptor.type", - index=1, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.monitoring.v3.NotificationChannelDescriptor.display_name", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.monitoring.v3.NotificationChannelDescriptor.description", - index=3, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.monitoring.v3.NotificationChannelDescriptor.labels", - index=4, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="supported_tiers", - full_name="google.monitoring.v3.NotificationChannelDescriptor.supported_tiers", - index=5, - number=5, - type=14, - cpp_type=8, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=212, - serialized_end=423, -) - - -_NOTIFICATIONCHANNEL_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - 
full_name="google.monitoring.v3.NotificationChannel.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.monitoring.v3.NotificationChannel.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.monitoring.v3.NotificationChannel.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=807, - serialized_end=852, -) - -_NOTIFICATIONCHANNEL_USERLABELSENTRY = _descriptor.Descriptor( - name="UserLabelsEntry", - full_name="google.monitoring.v3.NotificationChannel.UserLabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.monitoring.v3.NotificationChannel.UserLabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.monitoring.v3.NotificationChannel.UserLabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=854, - serialized_end=903, -) - -_NOTIFICATIONCHANNEL = _descriptor.Descriptor( - name="NotificationChannel", - full_name="google.monitoring.v3.NotificationChannel", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="type", - full_name="google.monitoring.v3.NotificationChannel.type", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.NotificationChannel.name", - index=1, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.monitoring.v3.NotificationChannel.display_name", - 
index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.monitoring.v3.NotificationChannel.description", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.monitoring.v3.NotificationChannel.labels", - index=4, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="user_labels", - full_name="google.monitoring.v3.NotificationChannel.user_labels", - index=5, - number=8, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="verification_status", - full_name="google.monitoring.v3.NotificationChannel.verification_status", - index=6, - number=9, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="enabled", - full_name="google.monitoring.v3.NotificationChannel.enabled", - index=7, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _NOTIFICATIONCHANNEL_LABELSENTRY, - _NOTIFICATIONCHANNEL_USERLABELSENTRY, - ], - enum_types=[_NOTIFICATIONCHANNEL_VERIFICATIONSTATUS], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=426, - serialized_end=992, -) - -_NOTIFICATIONCHANNELDESCRIPTOR.fields_by_name[ - "labels" -].message_type = google_dot_api_dot_label__pb2._LABELDESCRIPTOR -_NOTIFICATIONCHANNELDESCRIPTOR.fields_by_name[ - "supported_tiers" -].enum_type = google_dot_cloud_dot_monitoring__v3_dot_proto_dot_common__pb2._SERVICETIER -_NOTIFICATIONCHANNEL_LABELSENTRY.containing_type = _NOTIFICATIONCHANNEL -_NOTIFICATIONCHANNEL_USERLABELSENTRY.containing_type = _NOTIFICATIONCHANNEL -_NOTIFICATIONCHANNEL.fields_by_name[ - "labels" -].message_type = _NOTIFICATIONCHANNEL_LABELSENTRY -_NOTIFICATIONCHANNEL.fields_by_name[ - "user_labels" -].message_type = _NOTIFICATIONCHANNEL_USERLABELSENTRY -_NOTIFICATIONCHANNEL.fields_by_name[ - "verification_status" -].enum_type = _NOTIFICATIONCHANNEL_VERIFICATIONSTATUS -_NOTIFICATIONCHANNEL.fields_by_name[ - "enabled" -].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE -_NOTIFICATIONCHANNEL_VERIFICATIONSTATUS.containing_type = _NOTIFICATIONCHANNEL 
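For orientation, the NotificationChannel message whose descriptors are deleted above was an ordinary protobuf message once notification_pb2 was imported. A minimal sketch of building one (the module path and field names come from the deleted notification.proto; the "email" channel type and its label key are illustrative assumptions, not taken from this patch):

    from google.cloud.monitoring_v3.proto import notification_pb2

    channel = notification_pb2.NotificationChannel(
        type="email",                                    # assumed type; must match a NotificationChannelDescriptor.type
        display_name="On-call email",
        description="Primary on-call address",
        labels={"email_address": "oncall@example.com"},  # permissible keys are defined by the descriptor (assumed key)
        user_labels={"team": "sre"},                     # free-form metadata; up to 64 entries
    )
    channel.enabled.value = True                         # `enabled` is a google.protobuf.BoolValue wrapper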
-DESCRIPTOR.message_types_by_name[ - "NotificationChannelDescriptor" -] = _NOTIFICATIONCHANNELDESCRIPTOR -DESCRIPTOR.message_types_by_name["NotificationChannel"] = _NOTIFICATIONCHANNEL -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -NotificationChannelDescriptor = _reflection.GeneratedProtocolMessageType( - "NotificationChannelDescriptor", - (_message.Message,), - dict( - DESCRIPTOR=_NOTIFICATIONCHANNELDESCRIPTOR, - __module__="google.cloud.monitoring_v3.proto.notification_pb2", - __doc__="""A description of a notification channel. The descriptor - includes the properties of the channel and the set of labels or fields - that must be specified to configure channels of a given type. - - - Attributes: - name: - The full REST resource name for this descriptor. The syntax - is: :: - projects/[PROJECT_ID]/notificationChannelDescriptors/[TYPE] - In the above, ``[TYPE]`` is the value of the ``type`` field. - type: - The type of notification channel, such as "email", "sms", etc. - Notification channel types are globally unique. - display_name: - A human-readable name for the notification channel type. This - form of the name is suitable for a user interface. - description: - A human-readable description of the notification channel type. - The description may include a description of the properties of - the channel and pointers to external documentation. - labels: - The set of labels that must be defined to identify a - particular channel of the corresponding type. Each label - includes a description for how that field should be populated. - supported_tiers: - The tiers that support this notification channel; the project - service tier must be one of the supported\_tiers. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.NotificationChannelDescriptor) - ), -) -_sym_db.RegisterMessage(NotificationChannelDescriptor) - -NotificationChannel = _reflection.GeneratedProtocolMessageType( - "NotificationChannel", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_NOTIFICATIONCHANNEL_LABELSENTRY, - __module__="google.cloud.monitoring_v3.proto.notification_pb2" - # @@protoc_insertion_point(class_scope:google.monitoring.v3.NotificationChannel.LabelsEntry) - ), - ), - UserLabelsEntry=_reflection.GeneratedProtocolMessageType( - "UserLabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_NOTIFICATIONCHANNEL_USERLABELSENTRY, - __module__="google.cloud.monitoring_v3.proto.notification_pb2" - # @@protoc_insertion_point(class_scope:google.monitoring.v3.NotificationChannel.UserLabelsEntry) - ), - ), - DESCRIPTOR=_NOTIFICATIONCHANNEL, - __module__="google.cloud.monitoring_v3.proto.notification_pb2", - __doc__="""A ``NotificationChannel`` is a medium through which an - alert is delivered when a policy violation is detected. Examples of - channels include email, SMS, and third-party messaging applications. - Fields containing sensitive information like authentication tokens or - contact info are only partially populated on retrieval. - - - Attributes: - type: - The type of the notification channel. This field matches the - value of the [NotificationChannelDescriptor.type][google.monit - oring.v3.NotificationChannelDescriptor.type] field. - name: - The full REST resource name for this channel. The syntax is: - :: - projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID] The - ``[CHANNEL_ID]`` is automatically assigned by the server on - creation. 
- display_name: - An optional human-readable name for this notification channel. - It is recommended that you specify a non-empty and unique name - in order to make it easier to identify the channels in your - project, though this is not enforced. The display name is - limited to 512 Unicode characters. - description: - An optional human-readable description of this notification - channel. This description may provide additional details, - beyond the display name, for the channel. This may not exceed - 1024 Unicode characters. - labels: - Configuration fields that define the channel and its behavior. - The permissible and required labels are specified in the [Noti - ficationChannelDescriptor.labels][google.monitoring.v3.Notific - ationChannelDescriptor.labels] of the - ``NotificationChannelDescriptor`` corresponding to the - ``type`` field. - user_labels: - User-supplied key/value data that does not need to conform to - the corresponding ``NotificationChannelDescriptor``'s schema, - unlike the ``labels`` field. This field is intended to be used - for organizing and identifying the ``NotificationChannel`` - objects. The field can contain up to 64 entries. Each key and - value is limited to 63 Unicode characters or 128 bytes, - whichever is smaller. Labels and values can contain only - lowercase letters, numerals, underscores, and dashes. Keys - must begin with a letter. - verification_status: - Indicates whether this channel has been verified or not. On a - [``ListNotificationChannels``][google.monitoring.v3.Notificati - onChannelService.ListNotificationChannels] or [``GetNotificati - onChannel``][google.monitoring.v3.NotificationChannelService.G - etNotificationChannel] operation, this field is expected to be - populated. If the value is ``UNVERIFIED``, then it indicates - that the channel is non-functioning (it both requires - verification and lacks verification); otherwise, it is assumed - that the channel works. If the channel is neither - ``VERIFIED`` nor ``UNVERIFIED``, it implies that the channel - is of a type that does not require verification or that this - specific channel has been exempted from verification because - it was created prior to verification being required for - channels of this type. This field cannot be modified using a - standard [``UpdateNotificationChannel``][google.monitoring.v3. - NotificationChannelService.UpdateNotificationChannel] - operation. To change the value of this field, you must call [` - `VerifyNotificationChannel``][google.monitoring.v3.Notificatio - nChannelService.VerifyNotificationChannel]. - enabled: - Whether notifications are forwarded to the described channel. - This makes it possible to disable delivery of notifications to - a particular channel without removing the channel from all - alerting policies that reference the channel. This is a more - convenient approach when the change is temporary and you want - to receive notifications from the same set of alerting - policies on the channel at some point in the future. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.NotificationChannel) - ), -) -_sym_db.RegisterMessage(NotificationChannel) -_sym_db.RegisterMessage(NotificationChannel.LabelsEntry) -_sym_db.RegisterMessage(NotificationChannel.UserLabelsEntry) - - -DESCRIPTOR._options = None -_NOTIFICATIONCHANNELDESCRIPTOR.fields_by_name["supported_tiers"]._options = None -_NOTIFICATIONCHANNEL_LABELSENTRY._options = None -_NOTIFICATIONCHANNEL_USERLABELSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/notification_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/notification_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/notification_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/monitoring/google/cloud/monitoring_v3/proto/notification_service.proto b/monitoring/google/cloud/monitoring_v3/proto/notification_service.proto deleted file mode 100644 index 263acf83b62f..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/notification_service.proto +++ /dev/null @@ -1,344 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.monitoring.v3; - -import "google/api/annotations.proto"; -import "google/monitoring/v3/notification.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/struct.proto"; -import "google/protobuf/timestamp.proto"; -import "google/api/client.proto"; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "NotificationServiceProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// The Notification Channel API provides access to configuration that -// controls how messages related to incidents are sent. -service NotificationChannelService { - option (google.api.default_host) = "monitoring.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/monitoring," - "https://www.googleapis.com/auth/monitoring.read"; - - // Lists the descriptors for supported channel types. The use of descriptors - // makes it possible for new channel types to be dynamically added. - rpc ListNotificationChannelDescriptors(ListNotificationChannelDescriptorsRequest) returns (ListNotificationChannelDescriptorsResponse) { - option (google.api.http) = { - get: "/v3/{name=projects/*}/notificationChannelDescriptors" - }; - } - - // Gets a single channel descriptor. The descriptor indicates which fields - // are expected / permitted for a notification channel of the given type. 
- rpc GetNotificationChannelDescriptor(GetNotificationChannelDescriptorRequest) returns (NotificationChannelDescriptor) { - option (google.api.http) = { - get: "/v3/{name=projects/*/notificationChannelDescriptors/*}" - }; - } - - // Lists the notification channels that have been created for the project. - rpc ListNotificationChannels(ListNotificationChannelsRequest) returns (ListNotificationChannelsResponse) { - option (google.api.http) = { - get: "/v3/{name=projects/*}/notificationChannels" - }; - } - - // Gets a single notification channel. The channel includes the relevant - // configuration details with which the channel was created. However, the - // response may truncate or omit passwords, API keys, or other private key - // matter and thus the response may not be 100% identical to the information - // that was supplied in the call to the create method. - rpc GetNotificationChannel(GetNotificationChannelRequest) returns (NotificationChannel) { - option (google.api.http) = { - get: "/v3/{name=projects/*/notificationChannels/*}" - }; - } - - // Creates a new notification channel, representing a single notification - // endpoint such as an email address, SMS number, or PagerDuty service. - rpc CreateNotificationChannel(CreateNotificationChannelRequest) returns (NotificationChannel) { - option (google.api.http) = { - post: "/v3/{name=projects/*}/notificationChannels" - body: "notification_channel" - }; - } - - // Updates a notification channel. Fields not specified in the field mask - // remain unchanged. - rpc UpdateNotificationChannel(UpdateNotificationChannelRequest) returns (NotificationChannel) { - option (google.api.http) = { - patch: "/v3/{notification_channel.name=projects/*/notificationChannels/*}" - body: "notification_channel" - }; - } - - // Deletes a notification channel. - rpc DeleteNotificationChannel(DeleteNotificationChannelRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v3/{name=projects/*/notificationChannels/*}" - }; - } - - // Causes a verification code to be delivered to the channel. The code - // can then be supplied in `VerifyNotificationChannel` to verify the channel. - rpc SendNotificationChannelVerificationCode(SendNotificationChannelVerificationCodeRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v3/{name=projects/*/notificationChannels/*}:sendVerificationCode" - body: "*" - }; - } - - // Requests a verification code for an already verified channel that can then - // be used in a call to VerifyNotificationChannel() on a different channel - // with an equivalent identity in the same or in a different project. This - // makes it possible to copy a channel between projects without requiring - // manual reverification of the channel. If the channel is not in the - // verified state, this method will fail (in other words, this may only be - // used if the SendNotificationChannelVerificationCode and - // VerifyNotificationChannel paths have already been used to put the given - // channel into the verified state). 
- // - // There is no guarantee that the verification codes returned by this method - // will be of a similar structure or form as the ones that are delivered - // to the channel via SendNotificationChannelVerificationCode; while - // VerifyNotificationChannel() will recognize both the codes delivered via - // SendNotificationChannelVerificationCode() and returned from - // GetNotificationChannelVerificationCode(), it is typically the case that - // the verification codes delivered via - // SendNotificationChannelVerificationCode() will be shorter and also - // have a shorter expiration (e.g. codes such as "G-123456") whereas - // GetVerificationCode() will typically return a much longer, websafe base - // 64 encoded string that has a longer expiration time. - rpc GetNotificationChannelVerificationCode(GetNotificationChannelVerificationCodeRequest) returns (GetNotificationChannelVerificationCodeResponse) { - option (google.api.http) = { - post: "/v3/{name=projects/*/notificationChannels/*}:getVerificationCode" - body: "*" - }; - } - - // Verifies a `NotificationChannel` by proving receipt of the code - // delivered to the channel as a result of calling - // `SendNotificationChannelVerificationCode`. - rpc VerifyNotificationChannel(VerifyNotificationChannelRequest) returns (NotificationChannel) { - option (google.api.http) = { - post: "/v3/{name=projects/*/notificationChannels/*}:verify" - body: "*" - }; - } -} - -// The `ListNotificationChannelDescriptors` request. -message ListNotificationChannelDescriptorsRequest { - // The REST resource name of the parent from which to retrieve - // the notification channel descriptors. The expected syntax is: - // - // projects/[PROJECT_ID] - // - // Note that this names the parent container in which to look for the - // descriptors; to retrieve a single descriptor by name, use the - // [GetNotificationChannelDescriptor][google.monitoring.v3.NotificationChannelService.GetNotificationChannelDescriptor] - // operation, instead. - string name = 4; - - // The maximum number of results to return in a single response. If - // not set to a positive number, a reasonable value will be chosen by the - // service. - int32 page_size = 2; - - // If non-empty, `page_token` must contain a value returned as the - // `next_page_token` in a previous response to request the next set - // of results. - string page_token = 3; -} - -// The `ListNotificationChannelDescriptors` response. -message ListNotificationChannelDescriptorsResponse { - // The monitored resource descriptors supported for the specified - // project, optionally filtered. - repeated NotificationChannelDescriptor channel_descriptors = 1; - - // If not empty, indicates that there may be more results that match - // the request. Use the value in the `page_token` field in a - // subsequent request to fetch the next set of results. If empty, - // all results have been returned. - string next_page_token = 2; -} - -// The `GetNotificationChannelDescriptor` response. -message GetNotificationChannelDescriptorRequest { - // The channel type for which to execute the request. The format is - // `projects/[PROJECT_ID]/notificationChannelDescriptors/{channel_type}`. - string name = 3; -} - -// The `CreateNotificationChannel` request. -message CreateNotificationChannelRequest { - // The project on which to execute the request. The format is: - // - // projects/[PROJECT_ID] - // - // Note that this names the container into which the channel will be - // written. This does not name the newly created channel. 
The resulting - // channel's name will have a normalized version of this field as a prefix, - // but will add `/notificationChannels/[CHANNEL_ID]` to identify the channel. - string name = 3; - - // The definition of the `NotificationChannel` to create. - NotificationChannel notification_channel = 2; -} - -// The `ListNotificationChannels` request. -message ListNotificationChannelsRequest { - // The project on which to execute the request. The format is - // `projects/[PROJECT_ID]`. That is, this names the container - // in which to look for the notification channels; it does not name a - // specific channel. To query a specific channel by REST resource name, use - // the - // [`GetNotificationChannel`][google.monitoring.v3.NotificationChannelService.GetNotificationChannel] - // operation. - string name = 5; - - // If provided, this field specifies the criteria that must be met by - // notification channels to be included in the response. - // - // For more details, see [sorting and - // filtering](/monitoring/api/v3/sorting-and-filtering). - string filter = 6; - - // A comma-separated list of fields by which to sort the result. Supports - // the same set of fields as in `filter`. Entries can be prefixed with - // a minus sign to sort in descending rather than ascending order. - // - // For more details, see [sorting and - // filtering](/monitoring/api/v3/sorting-and-filtering). - string order_by = 7; - - // The maximum number of results to return in a single response. If - // not set to a positive number, a reasonable value will be chosen by the - // service. - int32 page_size = 3; - - // If non-empty, `page_token` must contain a value returned as the - // `next_page_token` in a previous response to request the next set - // of results. - string page_token = 4; -} - -// The `ListNotificationChannels` response. -message ListNotificationChannelsResponse { - // The notification channels defined for the specified project. - repeated NotificationChannel notification_channels = 3; - - // If not empty, indicates that there may be more results that match - // the request. Use the value in the `page_token` field in a - // subsequent request to fetch the next set of results. If empty, - // all results have been returned. - string next_page_token = 2; -} - -// The `GetNotificationChannel` request. -message GetNotificationChannelRequest { - // The channel for which to execute the request. The format is - // `projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID]`. - string name = 3; -} - -// The `UpdateNotificationChannel` request. -message UpdateNotificationChannelRequest { - // The fields to update. - google.protobuf.FieldMask update_mask = 2; - - // A description of the changes to be applied to the specified - // notification channel. The description must provide a definition for - // fields to be updated; the names of these fields should also be - // included in the `update_mask`. - NotificationChannel notification_channel = 3; -} - -// The `DeleteNotificationChannel` request. -message DeleteNotificationChannelRequest { - // The channel for which to execute the request. The format is - // `projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID]`. - string name = 3; - - // If true, the notification channel will be deleted regardless of its - // use in alert policies (the policies will be updated to remove the - // channel). If false, channels that are still referenced by an existing - // alerting policy will fail to be deleted in a delete operation. 
- bool force = 5; -} - -// The `SendNotificationChannelVerificationCode` request. -message SendNotificationChannelVerificationCodeRequest { - // The notification channel to which to send a verification code. - string name = 1; -} - -// The `GetNotificationChannelVerificationCode` request. -message GetNotificationChannelVerificationCodeRequest { - // The notification channel for which a verification code is to be generated - // and retrieved. This must name a channel that is already verified; if - // the specified channel is not verified, the request will fail. - string name = 1; - - // The desired expiration time. If specified, the API will guarantee that - // the returned code will not be valid after the specified timestamp; - // however, the API cannot guarantee that the returned code will be - // valid for at least as long as the requested time (the API puts an upper - // bound on the amount of time for which a code may be valid). If omitted, - // a default expiration will be used, which may be less than the max - // permissible expiration (so specifying an expiration may extend the - // code's lifetime over omitting an expiration, even though the API does - // impose an upper limit on the maximum expiration that is permitted). - google.protobuf.Timestamp expire_time = 2; -} - -// The `GetNotificationChannelVerificationCode` request. -message GetNotificationChannelVerificationCodeResponse { - // The verification code, which may be used to verify other channels - // that have an equivalent identity (i.e. other channels of the same - // type with the same fingerprint such as other email channels with - // the same email address or other sms channels with the same number). - string code = 1; - - // The expiration time associated with the code that was returned. If - // an expiration was provided in the request, this is the minimum of the - // requested expiration in the request and the max permitted expiration. - google.protobuf.Timestamp expire_time = 2; -} - -// The `VerifyNotificationChannel` request. -message VerifyNotificationChannelRequest { - // The notification channel to verify. - string name = 1; - - // The verification code that was delivered to the channel as - // a result of invoking the `SendNotificationChannelVerificationCode` API - // method or that was retrieved from a verified channel via - // `GetNotificationChannelVerificationCode`. For example, one might have - // "G-123456" or "TKNZGhhd2EyN3I1MnRnMjRv" (in general, one is only - // guaranteed that the code is valid UTF-8; one should not - // make any assumptions regarding the structure or format of the code). - string code = 2; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/notification_service_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/notification_service_pb2.py deleted file mode 100644 index d8b09b505ea1..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/notification_service_pb2.py +++ /dev/null @@ -1,1350 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring_v3/proto/notification_service.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.monitoring_v3.proto import ( - notification_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/notification_service.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\030NotificationServiceProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n;google/cloud/monitoring_v3/proto/notification_service.proto\x12\x14google.monitoring.v3\x1a\x1cgoogle/api/annotations.proto\x1a\x33google/cloud/monitoring_v3/proto/notification.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/api/client.proto"`\n)ListNotificationChannelDescriptorsRequest\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"\x97\x01\n*ListNotificationChannelDescriptorsResponse\x12P\n\x13\x63hannel_descriptors\x18\x01 \x03(\x0b\x32\x33.google.monitoring.v3.NotificationChannelDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"7\n\'GetNotificationChannelDescriptorRequest\x12\x0c\n\x04name\x18\x03 \x01(\t"y\n CreateNotificationChannelRequest\x12\x0c\n\x04name\x18\x03 \x01(\t\x12G\n\x14notification_channel\x18\x02 \x01(\x0b\x32).google.monitoring.v3.NotificationChannel"x\n\x1fListNotificationChannelsRequest\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x06 \x01(\t\x12\x10\n\x08order_by\x18\x07 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"\x85\x01\n ListNotificationChannelsResponse\x12H\n\x15notification_channels\x18\x03 \x03(\x0b\x32).google.monitoring.v3.NotificationChannel\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"-\n\x1dGetNotificationChannelRequest\x12\x0c\n\x04name\x18\x03 \x01(\t"\x9c\x01\n UpdateNotificationChannelRequest\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12G\n\x14notification_channel\x18\x03 \x01(\x0b\x32).google.monitoring.v3.NotificationChannel"?\n DeleteNotificationChannelRequest\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\r\n\x05\x66orce\x18\x05 \x01(\x08">\n.SendNotificationChannelVerificationCodeRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"n\n-GetNotificationChannelVerificationCodeRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"o\n.GetNotificationChannelVerificationCodeResponse\x12\x0c\n\x04\x63ode\x18\x01 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp">\n VerifyNotificationChannelRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t2\xe7\x11\n\x1aNotificationChannelService\x12\xe5\x01\n"ListNotificationChannelDescriptors\x12?.google.monitoring.v3.ListNotificationChannelDescriptorsRequest\x1a@.google.monitoring.v3.ListNotificationChannelDescriptorsResponse"<\x82\xd3\xe4\x93\x02\x36\x12\x34/v3/{name=projects/*}/notificationChannelDescriptors\x12\xd6\x01\n GetNotificationChannelDescriptor\x12=.google.monitoring.v3.GetNotificationChannelDescriptorRequest\x1a\x33.google.monitoring.v3.NotificationChannelDescriptor">\x82\xd3\xe4\x93\x02\x38\x12\x36/v3/{name=projects/*/notificationChannelDescriptors/*}\x12\xbd\x01\n\x18ListNotificationChannels\x12\x35.google.monitoring.v3.ListNotificationChannelsRequest\x1a\x36.google.monitoring.v3.ListNotificationChannelsResponse"2\x82\xd3\xe4\x93\x02,\x12*/v3/{name=projects/*}/notificationChannels\x12\xae\x01\n\x16GetNotificationChannel\x12\x33.google.monitoring.v3.GetNotificationChannelRequest\x1a).google.monitoring.v3.NotificationChannel"4\x82\xd3\xe4\x93\x02.\x12,/v3/{name=projects/*/notificationChannels/*}\x12\xc8\x01\n\x19\x43reateNotificationChannel\x12\x36.google.monitoring.v3.CreateNotificationChannelRequest\x1a).google.monitoring.v3.NotificationChannel"H\x82\xd3\xe4\x93\x02\x42"*/v3/{name=projects/*}/notificationChannels:\x14notification_channel\x12\xdf\x01\n\x19UpdateNotificationChannel\x12\x36.google.monitoring.v3.UpdateNotificationChannelRequest\x1a).google.monitoring.v3.NotificationChannel"_\x82\xd3\xe4\x93\x02Y2A/v3/{notification_channel.name=projects/*/notificationChannels/*}:\x14notification_channel\x12\xa1\x01\n\x19\x44\x65leteNotificationChannel\x12\x36.google.monitoring.v3.DeleteNotificationChannelRequest\x1a\x16.google.protobuf.Empty"4\x82\xd3\xe4\x93\x02.*,/v3/{name=projects/*/notificationChannels/*}\x12\xd5\x01\n\'SendNotificationChannelVerificationCode\x12\x44.google.monitoring.v3.SendNotificationChannelVerificationCodeRequest\x1a\x16.google.protobuf.Empty"L\x82\xd3\xe4\x93\x02\x46"A/v3/{name=projects/*/notificationChannels/*}:sendVerificationCode:\x01*\x12\x80\x02\n&GetNotificationChannelVerificationCode\x12\x43.google.monitoring.v3.GetNotificationChannelVerificationCodeRequest\x1a\x44.google.monitoring.v3.GetNotificationChannelVerificationCodeResponse"K\x82\xd3\xe4\x93\x02\x45"@/v3/{name=projects/*/notificationChannels/*}:getVerificationCode:\x01*\x12\xbe\x01\n\x19VerifyNotificationChannel\x12\x36.google.monitoring.v3.VerifyNotificationChannelRequest\x1a).google.monitoring.v3.NotificationChannel">\x82\xd3\xe4\x93\x02\x38"3/v3/{name=projects/*/notificationChannels/*}:verify:\x01*\x1a\xa9\x01\xca\x41\x19monitoring.googleapis.com\xd2\x41\x89\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.readB\xb0\x01\n\x18\x63om.google.monitoring.v3B\x18NotificationServiceProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - 
google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_LISTNOTIFICATIONCHANNELDESCRIPTORSREQUEST = _descriptor.Descriptor( - name="ListNotificationChannelDescriptorsRequest", - full_name="google.monitoring.v3.ListNotificationChannelDescriptorsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.ListNotificationChannelDescriptorsRequest.name", - index=0, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.monitoring.v3.ListNotificationChannelDescriptorsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.monitoring.v3.ListNotificationChannelDescriptorsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=319, - serialized_end=415, -) - - -_LISTNOTIFICATIONCHANNELDESCRIPTORSRESPONSE = _descriptor.Descriptor( - name="ListNotificationChannelDescriptorsResponse", - full_name="google.monitoring.v3.ListNotificationChannelDescriptorsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="channel_descriptors", - full_name="google.monitoring.v3.ListNotificationChannelDescriptorsResponse.channel_descriptors", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.monitoring.v3.ListNotificationChannelDescriptorsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=418, - serialized_end=569, -) - - -_GETNOTIFICATIONCHANNELDESCRIPTORREQUEST = _descriptor.Descriptor( - name="GetNotificationChannelDescriptorRequest", - full_name="google.monitoring.v3.GetNotificationChannelDescriptorRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - 
fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.GetNotificationChannelDescriptorRequest.name", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=571, - serialized_end=626, -) - - -_CREATENOTIFICATIONCHANNELREQUEST = _descriptor.Descriptor( - name="CreateNotificationChannelRequest", - full_name="google.monitoring.v3.CreateNotificationChannelRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.CreateNotificationChannelRequest.name", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="notification_channel", - full_name="google.monitoring.v3.CreateNotificationChannelRequest.notification_channel", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=628, - serialized_end=749, -) - - -_LISTNOTIFICATIONCHANNELSREQUEST = _descriptor.Descriptor( - name="ListNotificationChannelsRequest", - full_name="google.monitoring.v3.ListNotificationChannelsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.ListNotificationChannelsRequest.name", - index=0, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.monitoring.v3.ListNotificationChannelsRequest.filter", - index=1, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="order_by", - full_name="google.monitoring.v3.ListNotificationChannelsRequest.order_by", - index=2, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.monitoring.v3.ListNotificationChannelsRequest.page_size", - index=3, - number=3, - type=5, - 
cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.monitoring.v3.ListNotificationChannelsRequest.page_token", - index=4, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=751, - serialized_end=871, -) - - -_LISTNOTIFICATIONCHANNELSRESPONSE = _descriptor.Descriptor( - name="ListNotificationChannelsResponse", - full_name="google.monitoring.v3.ListNotificationChannelsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="notification_channels", - full_name="google.monitoring.v3.ListNotificationChannelsResponse.notification_channels", - index=0, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.monitoring.v3.ListNotificationChannelsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=874, - serialized_end=1007, -) - - -_GETNOTIFICATIONCHANNELREQUEST = _descriptor.Descriptor( - name="GetNotificationChannelRequest", - full_name="google.monitoring.v3.GetNotificationChannelRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.GetNotificationChannelRequest.name", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1009, - serialized_end=1054, -) - - -_UPDATENOTIFICATIONCHANNELREQUEST = _descriptor.Descriptor( - name="UpdateNotificationChannelRequest", - full_name="google.monitoring.v3.UpdateNotificationChannelRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.monitoring.v3.UpdateNotificationChannelRequest.update_mask", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="notification_channel", - full_name="google.monitoring.v3.UpdateNotificationChannelRequest.notification_channel", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1057, - serialized_end=1213, -) - - -_DELETENOTIFICATIONCHANNELREQUEST = _descriptor.Descriptor( - name="DeleteNotificationChannelRequest", - full_name="google.monitoring.v3.DeleteNotificationChannelRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.DeleteNotificationChannelRequest.name", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="force", - full_name="google.monitoring.v3.DeleteNotificationChannelRequest.force", - index=1, - number=5, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1215, - serialized_end=1278, -) - - -_SENDNOTIFICATIONCHANNELVERIFICATIONCODEREQUEST = _descriptor.Descriptor( - name="SendNotificationChannelVerificationCodeRequest", - full_name="google.monitoring.v3.SendNotificationChannelVerificationCodeRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.SendNotificationChannelVerificationCodeRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1280, - serialized_end=1342, -) - - -_GETNOTIFICATIONCHANNELVERIFICATIONCODEREQUEST = _descriptor.Descriptor( - name="GetNotificationChannelVerificationCodeRequest", - full_name="google.monitoring.v3.GetNotificationChannelVerificationCodeRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.GetNotificationChannelVerificationCodeRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="expire_time", - full_name="google.monitoring.v3.GetNotificationChannelVerificationCodeRequest.expire_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1344, - serialized_end=1454, -) - - -_GETNOTIFICATIONCHANNELVERIFICATIONCODERESPONSE = _descriptor.Descriptor( - name="GetNotificationChannelVerificationCodeResponse", - full_name="google.monitoring.v3.GetNotificationChannelVerificationCodeResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="code", - full_name="google.monitoring.v3.GetNotificationChannelVerificationCodeResponse.code", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="expire_time", - full_name="google.monitoring.v3.GetNotificationChannelVerificationCodeResponse.expire_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1456, - serialized_end=1567, -) - - -_VERIFYNOTIFICATIONCHANNELREQUEST = _descriptor.Descriptor( - name="VerifyNotificationChannelRequest", - full_name="google.monitoring.v3.VerifyNotificationChannelRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.VerifyNotificationChannelRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="code", - full_name="google.monitoring.v3.VerifyNotificationChannelRequest.code", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1569, - serialized_end=1631, -) - -_LISTNOTIFICATIONCHANNELDESCRIPTORSRESPONSE.fields_by_name[ - "channel_descriptors" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2._NOTIFICATIONCHANNELDESCRIPTOR -) 
-_CREATENOTIFICATIONCHANNELREQUEST.fields_by_name[ - "notification_channel" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2._NOTIFICATIONCHANNEL -) -_LISTNOTIFICATIONCHANNELSRESPONSE.fields_by_name[ - "notification_channels" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2._NOTIFICATIONCHANNEL -) -_UPDATENOTIFICATIONCHANNELREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_UPDATENOTIFICATIONCHANNELREQUEST.fields_by_name[ - "notification_channel" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2._NOTIFICATIONCHANNEL -) -_GETNOTIFICATIONCHANNELVERIFICATIONCODEREQUEST.fields_by_name[ - "expire_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_GETNOTIFICATIONCHANNELVERIFICATIONCODERESPONSE.fields_by_name[ - "expire_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -DESCRIPTOR.message_types_by_name[ - "ListNotificationChannelDescriptorsRequest" -] = _LISTNOTIFICATIONCHANNELDESCRIPTORSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListNotificationChannelDescriptorsResponse" -] = _LISTNOTIFICATIONCHANNELDESCRIPTORSRESPONSE -DESCRIPTOR.message_types_by_name[ - "GetNotificationChannelDescriptorRequest" -] = _GETNOTIFICATIONCHANNELDESCRIPTORREQUEST -DESCRIPTOR.message_types_by_name[ - "CreateNotificationChannelRequest" -] = _CREATENOTIFICATIONCHANNELREQUEST -DESCRIPTOR.message_types_by_name[ - "ListNotificationChannelsRequest" -] = _LISTNOTIFICATIONCHANNELSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListNotificationChannelsResponse" -] = _LISTNOTIFICATIONCHANNELSRESPONSE -DESCRIPTOR.message_types_by_name[ - "GetNotificationChannelRequest" -] = _GETNOTIFICATIONCHANNELREQUEST -DESCRIPTOR.message_types_by_name[ - "UpdateNotificationChannelRequest" -] = _UPDATENOTIFICATIONCHANNELREQUEST -DESCRIPTOR.message_types_by_name[ - "DeleteNotificationChannelRequest" -] = _DELETENOTIFICATIONCHANNELREQUEST -DESCRIPTOR.message_types_by_name[ - "SendNotificationChannelVerificationCodeRequest" -] = _SENDNOTIFICATIONCHANNELVERIFICATIONCODEREQUEST -DESCRIPTOR.message_types_by_name[ - "GetNotificationChannelVerificationCodeRequest" -] = _GETNOTIFICATIONCHANNELVERIFICATIONCODEREQUEST -DESCRIPTOR.message_types_by_name[ - "GetNotificationChannelVerificationCodeResponse" -] = _GETNOTIFICATIONCHANNELVERIFICATIONCODERESPONSE -DESCRIPTOR.message_types_by_name[ - "VerifyNotificationChannelRequest" -] = _VERIFYNOTIFICATIONCHANNELREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ListNotificationChannelDescriptorsRequest = _reflection.GeneratedProtocolMessageType( - "ListNotificationChannelDescriptorsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTNOTIFICATIONCHANNELDESCRIPTORSREQUEST, - __module__="google.cloud.monitoring_v3.proto.notification_service_pb2", - __doc__="""The ``ListNotificationChannelDescriptors`` request. - - - Attributes: - name: - The REST resource name of the parent from which to retrieve - the notification channel descriptors. The expected syntax is: - :: projects/[PROJECT_ID] Note that this names the parent - container in which to look for the descriptors; to retrieve a - single descriptor by name, use the [GetNotificationChannelDesc - riptor][google.monitoring.v3.NotificationChannelService.GetNot - ificationChannelDescriptor] operation, instead. - page_size: - The maximum number of results to return in a single response. 
- If not set to a positive number, a reasonable value will be - chosen by the service. - page_token: - If non-empty, ``page_token`` must contain a value returned as - the ``next_page_token`` in a previous response to request the - next set of results. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListNotificationChannelDescriptorsRequest) - ), -) -_sym_db.RegisterMessage(ListNotificationChannelDescriptorsRequest) - -ListNotificationChannelDescriptorsResponse = _reflection.GeneratedProtocolMessageType( - "ListNotificationChannelDescriptorsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTNOTIFICATIONCHANNELDESCRIPTORSRESPONSE, - __module__="google.cloud.monitoring_v3.proto.notification_service_pb2", - __doc__="""The ``ListNotificationChannelDescriptors`` response. - - - Attributes: - channel_descriptors: - The monitored resource descriptors supported for the specified - project, optionally filtered. - next_page_token: - If not empty, indicates that there may be more results that - match the request. Use the value in the ``page_token`` field - in a subsequent request to fetch the next set of results. If - empty, all results have been returned. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListNotificationChannelDescriptorsResponse) - ), -) -_sym_db.RegisterMessage(ListNotificationChannelDescriptorsResponse) - -GetNotificationChannelDescriptorRequest = _reflection.GeneratedProtocolMessageType( - "GetNotificationChannelDescriptorRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETNOTIFICATIONCHANNELDESCRIPTORREQUEST, - __module__="google.cloud.monitoring_v3.proto.notification_service_pb2", - __doc__="""The ``GetNotificationChannelDescriptor`` response. - - - Attributes: - name: - The channel type for which to execute the request. The format - is ``projects/[PROJECT_ID]/notificationChannelDescriptors/{cha - nnel_type}``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.GetNotificationChannelDescriptorRequest) - ), -) -_sym_db.RegisterMessage(GetNotificationChannelDescriptorRequest) - -CreateNotificationChannelRequest = _reflection.GeneratedProtocolMessageType( - "CreateNotificationChannelRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATENOTIFICATIONCHANNELREQUEST, - __module__="google.cloud.monitoring_v3.proto.notification_service_pb2", - __doc__="""The ``CreateNotificationChannel`` request. - - - Attributes: - name: - The project on which to execute the request. The format is: - :: projects/[PROJECT_ID] Note that this names the - container into which the channel will be written. This does - not name the newly created channel. The resulting channel's - name will have a normalized version of this field as a prefix, - but will add ``/notificationChannels/[CHANNEL_ID]`` to - identify the channel. - notification_channel: - The definition of the ``NotificationChannel`` to create. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateNotificationChannelRequest) - ), -) -_sym_db.RegisterMessage(CreateNotificationChannelRequest) - -ListNotificationChannelsRequest = _reflection.GeneratedProtocolMessageType( - "ListNotificationChannelsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTNOTIFICATIONCHANNELSREQUEST, - __module__="google.cloud.monitoring_v3.proto.notification_service_pb2", - __doc__="""The ``ListNotificationChannels`` request. - - - Attributes: - name: - The project on which to execute the request. The format is - ``projects/[PROJECT_ID]``. 
That is, this names the container - in which to look for the notification channels; it does not - name a specific channel. To query a specific channel by REST - resource name, use the [``GetNotificationChannel``][google.mon - itoring.v3.NotificationChannelService.GetNotificationChannel] - operation. - filter: - If provided, this field specifies the criteria that must be - met by notification channels to be included in the response. - For more details, see `sorting and filtering - `__. - order_by: - A comma-separated list of fields by which to sort the result. - Supports the same set of fields as in ``filter``. Entries can - be prefixed with a minus sign to sort in descending rather - than ascending order. For more details, see `sorting and - filtering `__. - page_size: - The maximum number of results to return in a single response. - If not set to a positive number, a reasonable value will be - chosen by the service. - page_token: - If non-empty, ``page_token`` must contain a value returned as - the ``next_page_token`` in a previous response to request the - next set of results. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListNotificationChannelsRequest) - ), -) -_sym_db.RegisterMessage(ListNotificationChannelsRequest) - -ListNotificationChannelsResponse = _reflection.GeneratedProtocolMessageType( - "ListNotificationChannelsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTNOTIFICATIONCHANNELSRESPONSE, - __module__="google.cloud.monitoring_v3.proto.notification_service_pb2", - __doc__="""The ``ListNotificationChannels`` response. - - - Attributes: - notification_channels: - The notification channels defined for the specified project. - next_page_token: - If not empty, indicates that there may be more results that - match the request. Use the value in the ``page_token`` field - in a subsequent request to fetch the next set of results. If - empty, all results have been returned. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListNotificationChannelsResponse) - ), -) -_sym_db.RegisterMessage(ListNotificationChannelsResponse) - -GetNotificationChannelRequest = _reflection.GeneratedProtocolMessageType( - "GetNotificationChannelRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETNOTIFICATIONCHANNELREQUEST, - __module__="google.cloud.monitoring_v3.proto.notification_service_pb2", - __doc__="""The ``GetNotificationChannel`` request. - - - Attributes: - name: - The channel for which to execute the request. The format is - ``projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID]``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.GetNotificationChannelRequest) - ), -) -_sym_db.RegisterMessage(GetNotificationChannelRequest) - -UpdateNotificationChannelRequest = _reflection.GeneratedProtocolMessageType( - "UpdateNotificationChannelRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATENOTIFICATIONCHANNELREQUEST, - __module__="google.cloud.monitoring_v3.proto.notification_service_pb2", - __doc__="""The ``UpdateNotificationChannel`` request. - - - Attributes: - update_mask: - The fields to update. - notification_channel: - A description of the changes to be applied to the specified - notification channel. The description must provide a - definition for fields to be updated; the names of these fields - should also be included in the ``update_mask``. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.UpdateNotificationChannelRequest) - ), -) -_sym_db.RegisterMessage(UpdateNotificationChannelRequest) - -DeleteNotificationChannelRequest = _reflection.GeneratedProtocolMessageType( - "DeleteNotificationChannelRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETENOTIFICATIONCHANNELREQUEST, - __module__="google.cloud.monitoring_v3.proto.notification_service_pb2", - __doc__="""The ``DeleteNotificationChannel`` request. - - - Attributes: - name: - The channel for which to execute the request. The format is - ``projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID]``. - force: - If true, the notification channel will be deleted regardless - of its use in alert policies (the policies will be updated to - remove the channel). If false, channels that are still - referenced by an existing alerting policy will fail to be - deleted in a delete operation. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.DeleteNotificationChannelRequest) - ), -) -_sym_db.RegisterMessage(DeleteNotificationChannelRequest) - -SendNotificationChannelVerificationCodeRequest = _reflection.GeneratedProtocolMessageType( - "SendNotificationChannelVerificationCodeRequest", - (_message.Message,), - dict( - DESCRIPTOR=_SENDNOTIFICATIONCHANNELVERIFICATIONCODEREQUEST, - __module__="google.cloud.monitoring_v3.proto.notification_service_pb2", - __doc__="""The ``SendNotificationChannelVerificationCode`` request. - - - Attributes: - name: - The notification channel to which to send a verification code. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.SendNotificationChannelVerificationCodeRequest) - ), -) -_sym_db.RegisterMessage(SendNotificationChannelVerificationCodeRequest) - -GetNotificationChannelVerificationCodeRequest = _reflection.GeneratedProtocolMessageType( - "GetNotificationChannelVerificationCodeRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETNOTIFICATIONCHANNELVERIFICATIONCODEREQUEST, - __module__="google.cloud.monitoring_v3.proto.notification_service_pb2", - __doc__="""The ``GetNotificationChannelVerificationCode`` request. - - - Attributes: - name: - The notification channel for which a verification code is to - be generated and retrieved. This must name a channel that is - already verified; if the specified channel is not verified, - the request will fail. - expire_time: - The desired expiration time. If specified, the API will - guarantee that the returned code will not be valid after the - specified timestamp; however, the API cannot guarantee that - the returned code will be valid for at least as long as the - requested time (the API puts an upper bound on the amount of - time for which a code may be valid). If omitted, a default - expiration will be used, which may be less than the max - permissible expiration (so specifying an expiration may extend - the code's lifetime over omitting an expiration, even though - the API does impose an upper limit on the maximum expiration - that is permitted). 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.GetNotificationChannelVerificationCodeRequest) - ), -) -_sym_db.RegisterMessage(GetNotificationChannelVerificationCodeRequest) - -GetNotificationChannelVerificationCodeResponse = _reflection.GeneratedProtocolMessageType( - "GetNotificationChannelVerificationCodeResponse", - (_message.Message,), - dict( - DESCRIPTOR=_GETNOTIFICATIONCHANNELVERIFICATIONCODERESPONSE, - __module__="google.cloud.monitoring_v3.proto.notification_service_pb2", - __doc__="""The ``GetNotificationChannelVerificationCode`` request. - - - Attributes: - code: - The verification code, which may be used to verify other - channels that have an equivalent identity (i.e. other channels - of the same type with the same fingerprint such as other email - channels with the same email address or other sms channels - with the same number). - expire_time: - The expiration time associated with the code that was - returned. If an expiration was provided in the request, this - is the minimum of the requested expiration in the request and - the max permitted expiration. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.GetNotificationChannelVerificationCodeResponse) - ), -) -_sym_db.RegisterMessage(GetNotificationChannelVerificationCodeResponse) - -VerifyNotificationChannelRequest = _reflection.GeneratedProtocolMessageType( - "VerifyNotificationChannelRequest", - (_message.Message,), - dict( - DESCRIPTOR=_VERIFYNOTIFICATIONCHANNELREQUEST, - __module__="google.cloud.monitoring_v3.proto.notification_service_pb2", - __doc__="""The ``VerifyNotificationChannel`` request. - - - Attributes: - name: - The notification channel to verify. - code: - The verification code that was delivered to the channel as a - result of invoking the - ``SendNotificationChannelVerificationCode`` API method or that - was retrieved from a verified channel via - ``GetNotificationChannelVerificationCode``. For example, one - might have "G-123456" or "TKNZGhhd2EyN3I1MnRnMjRv" (in - general, one is only guaranteed that the code is valid UTF-8; - one should not make any assumptions regarding the structure or - format of the code). 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.VerifyNotificationChannelRequest) - ), -) -_sym_db.RegisterMessage(VerifyNotificationChannelRequest) - - -DESCRIPTOR._options = None - -_NOTIFICATIONCHANNELSERVICE = _descriptor.ServiceDescriptor( - name="NotificationChannelService", - full_name="google.monitoring.v3.NotificationChannelService", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\031monitoring.googleapis.com\322A\211\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.read" - ), - serialized_start=1634, - serialized_end=3913, - methods=[ - _descriptor.MethodDescriptor( - name="ListNotificationChannelDescriptors", - full_name="google.monitoring.v3.NotificationChannelService.ListNotificationChannelDescriptors", - index=0, - containing_service=None, - input_type=_LISTNOTIFICATIONCHANNELDESCRIPTORSREQUEST, - output_type=_LISTNOTIFICATIONCHANNELDESCRIPTORSRESPONSE, - serialized_options=_b( - "\202\323\344\223\0026\0224/v3/{name=projects/*}/notificationChannelDescriptors" - ), - ), - _descriptor.MethodDescriptor( - name="GetNotificationChannelDescriptor", - full_name="google.monitoring.v3.NotificationChannelService.GetNotificationChannelDescriptor", - index=1, - containing_service=None, - input_type=_GETNOTIFICATIONCHANNELDESCRIPTORREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2._NOTIFICATIONCHANNELDESCRIPTOR, - serialized_options=_b( - "\202\323\344\223\0028\0226/v3/{name=projects/*/notificationChannelDescriptors/*}" - ), - ), - _descriptor.MethodDescriptor( - name="ListNotificationChannels", - full_name="google.monitoring.v3.NotificationChannelService.ListNotificationChannels", - index=2, - containing_service=None, - input_type=_LISTNOTIFICATIONCHANNELSREQUEST, - output_type=_LISTNOTIFICATIONCHANNELSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002,\022*/v3/{name=projects/*}/notificationChannels" - ), - ), - _descriptor.MethodDescriptor( - name="GetNotificationChannel", - full_name="google.monitoring.v3.NotificationChannelService.GetNotificationChannel", - index=3, - containing_service=None, - input_type=_GETNOTIFICATIONCHANNELREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2._NOTIFICATIONCHANNEL, - serialized_options=_b( - "\202\323\344\223\002.\022,/v3/{name=projects/*/notificationChannels/*}" - ), - ), - _descriptor.MethodDescriptor( - name="CreateNotificationChannel", - full_name="google.monitoring.v3.NotificationChannelService.CreateNotificationChannel", - index=4, - containing_service=None, - input_type=_CREATENOTIFICATIONCHANNELREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2._NOTIFICATIONCHANNEL, - serialized_options=_b( - '\202\323\344\223\002B"*/v3/{name=projects/*}/notificationChannels:\024notification_channel' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateNotificationChannel", - full_name="google.monitoring.v3.NotificationChannelService.UpdateNotificationChannel", - index=5, - containing_service=None, - input_type=_UPDATENOTIFICATIONCHANNELREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2._NOTIFICATIONCHANNEL, - serialized_options=_b( - "\202\323\344\223\002Y2A/v3/{notification_channel.name=projects/*/notificationChannels/*}:\024notification_channel" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteNotificationChannel", - 
full_name="google.monitoring.v3.NotificationChannelService.DeleteNotificationChannel", - index=6, - containing_service=None, - input_type=_DELETENOTIFICATIONCHANNELREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002.*,/v3/{name=projects/*/notificationChannels/*}" - ), - ), - _descriptor.MethodDescriptor( - name="SendNotificationChannelVerificationCode", - full_name="google.monitoring.v3.NotificationChannelService.SendNotificationChannelVerificationCode", - index=7, - containing_service=None, - input_type=_SENDNOTIFICATIONCHANNELVERIFICATIONCODEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - '\202\323\344\223\002F"A/v3/{name=projects/*/notificationChannels/*}:sendVerificationCode:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="GetNotificationChannelVerificationCode", - full_name="google.monitoring.v3.NotificationChannelService.GetNotificationChannelVerificationCode", - index=8, - containing_service=None, - input_type=_GETNOTIFICATIONCHANNELVERIFICATIONCODEREQUEST, - output_type=_GETNOTIFICATIONCHANNELVERIFICATIONCODERESPONSE, - serialized_options=_b( - '\202\323\344\223\002E"@/v3/{name=projects/*/notificationChannels/*}:getVerificationCode:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="VerifyNotificationChannel", - full_name="google.monitoring.v3.NotificationChannelService.VerifyNotificationChannel", - index=9, - containing_service=None, - input_type=_VERIFYNOTIFICATIONCHANNELREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2._NOTIFICATIONCHANNEL, - serialized_options=_b( - '\202\323\344\223\0028"3/v3/{name=projects/*/notificationChannels/*}:verify:\001*' - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_NOTIFICATIONCHANNELSERVICE) - -DESCRIPTOR.services_by_name["NotificationChannelService"] = _NOTIFICATIONCHANNELSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/notification_service_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/notification_service_pb2_grpc.py deleted file mode 100644 index ba8b68d03c70..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/notification_service_pb2_grpc.py +++ /dev/null @@ -1,239 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.monitoring_v3.proto import ( - notification_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2, -) -from google.cloud.monitoring_v3.proto import ( - notification_service_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class NotificationChannelServiceStub(object): - """The Notification Channel API provides access to configuration that - controls how messages related to incidents are sent. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.ListNotificationChannelDescriptors = channel.unary_unary( - "/google.monitoring.v3.NotificationChannelService/ListNotificationChannelDescriptors", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.ListNotificationChannelDescriptorsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.ListNotificationChannelDescriptorsResponse.FromString, - ) - self.GetNotificationChannelDescriptor = channel.unary_unary( - "/google.monitoring.v3.NotificationChannelService/GetNotificationChannelDescriptor", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.GetNotificationChannelDescriptorRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2.NotificationChannelDescriptor.FromString, - ) - self.ListNotificationChannels = channel.unary_unary( - "/google.monitoring.v3.NotificationChannelService/ListNotificationChannels", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.ListNotificationChannelsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.ListNotificationChannelsResponse.FromString, - ) - self.GetNotificationChannel = channel.unary_unary( - "/google.monitoring.v3.NotificationChannelService/GetNotificationChannel", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.GetNotificationChannelRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2.NotificationChannel.FromString, - ) - self.CreateNotificationChannel = channel.unary_unary( - "/google.monitoring.v3.NotificationChannelService/CreateNotificationChannel", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.CreateNotificationChannelRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2.NotificationChannel.FromString, - ) - self.UpdateNotificationChannel = channel.unary_unary( - "/google.monitoring.v3.NotificationChannelService/UpdateNotificationChannel", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.UpdateNotificationChannelRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2.NotificationChannel.FromString, - ) - self.DeleteNotificationChannel = channel.unary_unary( - "/google.monitoring.v3.NotificationChannelService/DeleteNotificationChannel", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.DeleteNotificationChannelRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.SendNotificationChannelVerificationCode = channel.unary_unary( - "/google.monitoring.v3.NotificationChannelService/SendNotificationChannelVerificationCode", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.SendNotificationChannelVerificationCodeRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.GetNotificationChannelVerificationCode = channel.unary_unary( - "/google.monitoring.v3.NotificationChannelService/GetNotificationChannelVerificationCode", - 
request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.GetNotificationChannelVerificationCodeRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.GetNotificationChannelVerificationCodeResponse.FromString, - ) - self.VerifyNotificationChannel = channel.unary_unary( - "/google.monitoring.v3.NotificationChannelService/VerifyNotificationChannel", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.VerifyNotificationChannelRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2.NotificationChannel.FromString, - ) - - -class NotificationChannelServiceServicer(object): - """The Notification Channel API provides access to configuration that - controls how messages related to incidents are sent. - """ - - def ListNotificationChannelDescriptors(self, request, context): - """Lists the descriptors for supported channel types. The use of descriptors - makes it possible for new channel types to be dynamically added. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetNotificationChannelDescriptor(self, request, context): - """Gets a single channel descriptor. The descriptor indicates which fields - are expected / permitted for a notification channel of the given type. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListNotificationChannels(self, request, context): - """Lists the notification channels that have been created for the project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetNotificationChannel(self, request, context): - """Gets a single notification channel. The channel includes the relevant - configuration details with which the channel was created. However, the - response may truncate or omit passwords, API keys, or other private key - matter and thus the response may not be 100% identical to the information - that was supplied in the call to the create method. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateNotificationChannel(self, request, context): - """Creates a new notification channel, representing a single notification - endpoint such as an email address, SMS number, or PagerDuty service. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateNotificationChannel(self, request, context): - """Updates a notification channel. Fields not specified in the field mask - remain unchanged. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteNotificationChannel(self, request, context): - """Deletes a notification channel. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def SendNotificationChannelVerificationCode(self, request, context): - """Causes a verification code to be delivered to the channel. The code - can then be supplied in `VerifyNotificationChannel` to verify the channel. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetNotificationChannelVerificationCode(self, request, context): - """Requests a verification code for an already verified channel that can then - be used in a call to VerifyNotificationChannel() on a different channel - with an equivalent identity in the same or in a different project. This - makes it possible to copy a channel between projects without requiring - manual reverification of the channel. If the channel is not in the - verified state, this method will fail (in other words, this may only be - used if the SendNotificationChannelVerificationCode and - VerifyNotificationChannel paths have already been used to put the given - channel into the verified state). - - There is no guarantee that the verification codes returned by this method - will be of a similar structure or form as the ones that are delivered - to the channel via SendNotificationChannelVerificationCode; while - VerifyNotificationChannel() will recognize both the codes delivered via - SendNotificationChannelVerificationCode() and returned from - GetNotificationChannelVerificationCode(), it is typically the case that - the verification codes delivered via - SendNotificationChannelVerificationCode() will be shorter and also - have a shorter expiration (e.g. codes such as "G-123456") whereas - GetVerificationCode() will typically return a much longer, websafe base - 64 encoded string that has a longer expiration time. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def VerifyNotificationChannel(self, request, context): - """Verifies a `NotificationChannel` by proving receipt of the code - delivered to the channel as a result of calling - `SendNotificationChannelVerificationCode`. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_NotificationChannelServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "ListNotificationChannelDescriptors": grpc.unary_unary_rpc_method_handler( - servicer.ListNotificationChannelDescriptors, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.ListNotificationChannelDescriptorsRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.ListNotificationChannelDescriptorsResponse.SerializeToString, - ), - "GetNotificationChannelDescriptor": grpc.unary_unary_rpc_method_handler( - servicer.GetNotificationChannelDescriptor, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.GetNotificationChannelDescriptorRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2.NotificationChannelDescriptor.SerializeToString, - ), - "ListNotificationChannels": grpc.unary_unary_rpc_method_handler( - servicer.ListNotificationChannels, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.ListNotificationChannelsRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.ListNotificationChannelsResponse.SerializeToString, - ), - "GetNotificationChannel": grpc.unary_unary_rpc_method_handler( - servicer.GetNotificationChannel, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.GetNotificationChannelRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2.NotificationChannel.SerializeToString, - ), - "CreateNotificationChannel": grpc.unary_unary_rpc_method_handler( - servicer.CreateNotificationChannel, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.CreateNotificationChannelRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2.NotificationChannel.SerializeToString, - ), - "UpdateNotificationChannel": grpc.unary_unary_rpc_method_handler( - servicer.UpdateNotificationChannel, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.UpdateNotificationChannelRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2.NotificationChannel.SerializeToString, - ), - "DeleteNotificationChannel": grpc.unary_unary_rpc_method_handler( - servicer.DeleteNotificationChannel, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.DeleteNotificationChannelRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "SendNotificationChannelVerificationCode": grpc.unary_unary_rpc_method_handler( - servicer.SendNotificationChannelVerificationCode, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.SendNotificationChannelVerificationCodeRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "GetNotificationChannelVerificationCode": grpc.unary_unary_rpc_method_handler( - servicer.GetNotificationChannelVerificationCode, - 
request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.GetNotificationChannelVerificationCodeRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.GetNotificationChannelVerificationCodeResponse.SerializeToString, - ), - "VerifyNotificationChannel": grpc.unary_unary_rpc_method_handler( - servicer.VerifyNotificationChannel, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__service__pb2.VerifyNotificationChannelRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_notification__pb2.NotificationChannel.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.monitoring.v3.NotificationChannelService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/monitoring/google/cloud/monitoring_v3/proto/service.proto b/monitoring/google/cloud/monitoring_v3/proto/service.proto deleted file mode 100644 index b0daa551e80b..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/service.proto +++ /dev/null @@ -1,379 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.monitoring.v3; - -import "google/api/monitored_resource.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/timestamp.proto"; -import "google/type/calendar_period.proto"; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "ServiceMonitoringProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// A `Service` is a discrete, autonomous, and network-accessible unit, designed -// to solve an individual concern -// ([Wikipedia](https://en.wikipedia.org/wiki/Service-orientation)). In -// Stackdriver Monitoring, a `Service` acts as the root resource under which -// operational aspects of the service are accessible. -message Service { - // Custom view of service telemetry. Currently a place-holder pending final - // design. - message Custom { - - } - - // App Engine service. Learn more at https://cloud.google.com/appengine. - message AppEngine { - // The ID of the App Engine module underlying this service. Corresponds to - // the `module_id` resource label in the `gae_app` monitored resource: - // https://cloud.google.com/monitoring/api/resources#tag_gae_app - string module_id = 1; - } - - // Cloud Endpoints service. Learn more at https://cloud.google.com/endpoints. - message CloudEndpoints { - // The name of the Cloud Endpoints service underlying this service. - // Corresponds to the `service` resource label in the `api` monitored - // resource: https://cloud.google.com/monitoring/api/resources#tag_api - string service = 1; - } - - // Istio service. 
Learn more at http://istio.io. - message ClusterIstio { - // The location of the Kubernetes cluster in which this Istio service is - // defined. Corresponds to the `location` resource label in `k8s_cluster` - // resources. - string location = 1; - - // The name of the Kubernetes cluster in which this Istio service is - // defined. Corresponds to the `cluster_name` resource label in - // `k8s_cluster` resources. - string cluster_name = 2; - - // The namespace of the Istio service underlying this service. Corresponds - // to the `destination_service_namespace` metric label in Istio metrics. - string service_namespace = 3; - - // The name of the Istio service underlying this service. Corresponds to the - // `destination_service_name` metric label in Istio metrics. - string service_name = 4; - } - - // Configuration for how to query telemetry on a Service. - message Telemetry { - // The full name of the resource that defines this service. Formatted as - // described in https://cloud.google.com/apis/design/resource_names. - string resource_name = 1; - } - - // Resource name for this Service. Of the form - // `projects/{project_id}/services/{service_id}`. - string name = 1; - - // Name used for UI elements listing this Service. - string display_name = 2; - - // REQUIRED. Service-identifying atoms specifying the underlying service. - oneof identifier { - // Custom service type. - Custom custom = 6; - - // Type used for App Engine services. - AppEngine app_engine = 7; - - // Type used for Cloud Endpoints services. - CloudEndpoints cloud_endpoints = 8; - - // Type used for Istio services that live in a Kubernetes cluster. - ClusterIstio cluster_istio = 9; - } - - // Configuration for how to query telemetry on a Service. - Telemetry telemetry = 13; -} - -// A Service-Level Objective (SLO) describes a level of desired good service. It -// consists of a service-level indicator (SLI), a performance goal, and a period -// over which the objective is to be evaluated against that goal. The SLO can -// use SLIs defined in a number of different manners. Typical SLOs might include -// "99% of requests in each rolling week have latency below 200 milliseconds" or -// "99.5% of requests in each calendar month return successfully." -message ServiceLevelObjective { - // `ServiceLevelObjective.View` determines what form of - // `ServiceLevelObjective` is returned from `GetServiceLevelObjective`, - // `ListServiceLevelObjectives`, and `ListServiceLevelObjectiveVersions` RPCs. - enum View { - // Same as FULL. - VIEW_UNSPECIFIED = 0; - - // Return the embedded `ServiceLevelIndicator` in the form in which it was - // defined. If it was defined using a `BasicSli`, return that `BasicSli`. - FULL = 2; - - // For `ServiceLevelIndicator`s using `BasicSli` articulation, instead - // return the `ServiceLevelIndicator` with its mode of computation fully - // spelled out as a `RequestBasedSli`. For `ServiceLevelIndicator`s using - // `RequestBasedSli` or `WindowsBasedSli`, return the - // `ServiceLevelIndicator` as it was provided. - EXPLICIT = 1; - } - - // Resource name for this `ServiceLevelObjective`. - // Of the form - // `projects/{project_id}/services/{service_id}/serviceLevelObjectives/{slo_name}`. - string name = 1; - - // Name used for UI elements listing this SLO. - string display_name = 11; - - // The definition of good service, used to measure and calculate the quality - // of the `Service`'s performance with respect to a single aspect of service - // quality. 
- ServiceLevelIndicator service_level_indicator = 3; - - // The fraction of service that must be good in order for this objective to be - // met. `0 < goal <= 0.999`. - double goal = 4; - - // The time period over which the objective will be evaluated. - oneof period { - // A rolling time period, semantically "in the past ``". - // Must be an integer multiple of 1 day no larger than 30 days. - google.protobuf.Duration rolling_period = 5; - - // A calendar period, semantically "since the start of the current - // ``". At this time, only `DAY`, `WEEK`, `FORTNIGHT`, and - // `MONTH` are supported. - google.type.CalendarPeriod calendar_period = 6; - } -} - -// A Service-Level Indicator (SLI) describes the "performance" of a service. For -// some services, the SLI is well-defined. In such cases, the SLI can be -// described easily by referencing the well-known SLI and providing the needed -// parameters. Alternatively, a "custom" SLI can be defined with a query to the -// underlying metric store. An SLI is defined to be `good_service / -// total_service` over any queried time interval. The value of performance -// always falls into the range `0 <= performance <= 1`. A custom SLI describes -// how to compute this ratio, whether this is by dividing values from a pair of -// time series, cutting a `Distribution` into good and bad counts, or counting -// time windows in which the service complies with a criterion. For separation -// of concerns, a single Service-Level Indicator measures performance for only -// one aspect of service quality, such as fraction of successful queries or -// fast-enough queries. -message ServiceLevelIndicator { - // Service level indicators can be grouped by whether the "unit" of service - // being measured is based on counts of good requests or on counts of good - // time windows - oneof type { - // Basic SLI on a well-known service type. - BasicSli basic_sli = 4; - - // Request-based SLIs - RequestBasedSli request_based = 1; - - // Windows-based SLIs - WindowsBasedSli windows_based = 2; - } -} - -// An SLI measuring performance on a well-known service type. Performance will -// be computed on the basis of pre-defined metrics. The type of the -// `service_resource` determines the metrics to use and the -// `service_resource.labels` and `metric_labels` are used to construct a -// monitoring filter to filter that metric down to just the data relevant to -// this service. -message BasicSli { - // Future parameters for the availability SLI. - message AvailabilityCriteria { - - } - - // Parameters for a latency threshold SLI. - message LatencyCriteria { - // Good service is defined to be the count of requests made to this service - // that return in no more than `threshold`. - google.protobuf.Duration threshold = 3; - } - - // OPTIONAL: The set of RPCs to which this SLI is relevant. Telemetry from - // other methods will not be used to calculate performance for this SLI. If - // omitted, this SLI applies to all the Service's methods. For service types - // that don't support breaking down by method, setting this field will result - // in an error. - repeated string method = 7; - - // OPTIONAL: The set of locations to which this SLI is relevant. Telemetry - // from other locations will not be used to calculate performance for this - // SLI. If omitted, this SLI applies to all locations in which the Service has - // activity. For service types that don't support breaking down by location, - // setting this field will result in an error. 
- repeated string location = 8; - - // OPTIONAL: The set of API versions to which this SLI is relevant. Telemetry - // from other API versions will not be used to calculate performance for this - // SLI. If omitted, this SLI applies to all API versions. For service types - // that don't support breaking down by version, setting this field will result - // in an error. - repeated string version = 9; - - // This SLI can be evaluated on the basis of availability or latency. - oneof sli_criteria { - // Good service is defined to be the count of requests made to this service - // that return successfully. - AvailabilityCriteria availability = 2; - - // Good service is defined to be the count of requests made to this service - // that are fast enough with respect to `latency.threshold`. - LatencyCriteria latency = 3; - } -} - -// Range of numerical values, inclusive of `min` and exclusive of `max`. If the -// open range "< range.max" is desired, set `range.min = -infinity`. If the open -// range ">= range.min" is desired, set `range.max = infinity`. -message Range { - // Range minimum. - double min = 1; - - // Range maximum. - double max = 2; -} - -// Service Level Indicators for which atomic units of service are counted -// directly. -message RequestBasedSli { - // The means to compute a ratio of `good_service` to `total_service`. - oneof method { - // `good_total_ratio` is used when the ratio of `good_service` to - // `total_service` is computed from two `TimeSeries`. - TimeSeriesRatio good_total_ratio = 1; - - // `distribution_cut` is used when `good_service` is a count of values - // aggregated in a `Distribution` that fall into a good range. The - // `total_service` is the total count of all values aggregated in the - // `Distribution`. - DistributionCut distribution_cut = 3; - } -} - -// A `TimeSeriesRatio` specifies two `TimeSeries` to use for computing the -// `good_service / total_service` ratio. The specified `TimeSeries` must have -// `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind = -// DELTA` or `MetricKind = CUMULATIVE`. The `TimeSeriesRatio` must specify -// exactly two of good, bad, and total, and the relationship `good_service + -// bad_service = total_service` will be assumed. -message TimeSeriesRatio { - // A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters) - // specifying a `TimeSeries` quantifying good service provided. Must have - // `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind = - // DELTA` or `MetricKind = CUMULATIVE`. - string good_service_filter = 4; - - // A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters) - // specifying a `TimeSeries` quantifying bad service, either demanded service - // that was not provided or demanded service that was of inadequate quality. - // Must have `ValueType = DOUBLE` or `ValueType = INT64` and must have - // `MetricKind = DELTA` or `MetricKind = CUMULATIVE`. - string bad_service_filter = 5; - - // A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters) - // specifying a `TimeSeries` quantifying total demanded service. Must have - // `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind = - // DELTA` or `MetricKind = CUMULATIVE`. - string total_service_filter = 6; -} - -// A `DistributionCut` defines a `TimeSeries` and thresholds used for measuring -// good service and total service. The `TimeSeries` must have `ValueType = -// DISTRIBUTION` and `MetricKind = DELTA` or `MetricKind = CUMULATIVE`. 
The -// computed `good_service` will be the count of values x in the `Distribution` -// such that `range.min <= x < range.max`. -message DistributionCut { - // A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters) - // specifying a `TimeSeries` aggregating values. Must have `ValueType = - // DISTRIBUTION` and `MetricKind = DELTA` or `MetricKind = CUMULATIVE`. - string distribution_filter = 4; - - // Range of values considered "good." For a one-sided range, set one bound to - // an infinite value. - Range range = 5; -} - -// A `WindowsBasedSli` defines `good_service` as the count of time windows for -// which the provided service was of good quality. Criteria for determining -// if service was good are embedded in the `window_criterion`. -message WindowsBasedSli { - // A `PerformanceThreshold` is used when each window is good when that window - // has a sufficiently high `performance`. - message PerformanceThreshold { - // The means, either a request-based SLI or a basic SLI, by which to compute - // performance over a window. - oneof type { - // `RequestBasedSli` to evaluate to judge window quality. - RequestBasedSli performance = 1; - - // `BasicSli` to evaluate to judge window quality. - BasicSli basic_sli_performance = 3; - } - - // If window `performance >= threshold`, the window is counted as good. - double threshold = 2; - } - - // A `MetricRange` is used when each window is good when the value x of a - // single `TimeSeries` satisfies `range.min <= x < range.max`. The provided - // `TimeSeries` must have `ValueType = INT64` or `ValueType = DOUBLE` and - // `MetricKind = GAUGE`. - message MetricRange { - // A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters) - // specifying the `TimeSeries` to use for evaluating window quality. - string time_series = 1; - - // Range of values considered "good." For a one-sided range, set one bound - // to an infinite value. - Range range = 4; - } - - // The criterion to use for evaluating window goodness. - oneof window_criterion { - // A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters) - // specifying a `TimeSeries` with `ValueType = BOOL`. The window is good if - // any `true` values appear in the window. - string good_bad_metric_filter = 5; - - // A window is good if its `performance` is high enough. - PerformanceThreshold good_total_ratio_threshold = 2; - - // A window is good if the metric's value is in a good range, averaged - // across returned streams. - MetricRange metric_mean_in_range = 6; - - // A window is good if the metric's value is in a good range, summed across - // returned streams. - MetricRange metric_sum_in_range = 7; - } - - // Duration over which window quality is evaluated. Must be an integer - // fraction of a day and at least `60s`. - google.protobuf.Duration window_period = 4; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/service_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/service_pb2.py deleted file mode 100644 index b5a6ffcfccc8..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/service_pb2.py +++ /dev/null @@ -1,2072 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
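To make the SLO and SLI messages defined in service.proto concrete, the following small, self-contained sketch builds a request-based ServiceLevelObjective directly from the generated service_pb2 module removed in this patch; the display name and monitoring filter strings are hypothetical.

from google.protobuf import duration_pb2
from google.cloud.monitoring_v3.proto import service_pb2

# 99% of requests good over a rolling 7-day window; "good" and "total" are each
# selected by a monitoring filter, and exactly two of good/bad/total are given.
slo = service_pb2.ServiceLevelObjective(
    display_name="99% availability, rolling 7 days",  # hypothetical
    goal=0.99,
    rolling_period=duration_pb2.Duration(seconds=7 * 24 * 3600),
    service_level_indicator=service_pb2.ServiceLevelIndicator(
        request_based=service_pb2.RequestBasedSli(
            good_total_ratio=service_pb2.TimeSeriesRatio(
                good_service_filter='metric.type = "example.com/good_count"',    # hypothetical filter
                total_service_filter='metric.type = "example.com/total_count"',  # hypothetical filter
            )
        )
    ),
)
print(slo)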
-# source: google/cloud/monitoring_v3/proto/service.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.type import calendar_period_pb2 as google_dot_type_dot_calendar__period__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/service.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\026ServiceMonitoringProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n.google/cloud/monitoring_v3/proto/service.proto\x12\x14google.monitoring.v3\x1a#google/api/monitored_resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a!google/type/calendar_period.proto"\xd6\x04\n\x07Service\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x36\n\x06\x63ustom\x18\x06 \x01(\x0b\x32$.google.monitoring.v3.Service.CustomH\x00\x12=\n\napp_engine\x18\x07 \x01(\x0b\x32\'.google.monitoring.v3.Service.AppEngineH\x00\x12G\n\x0f\x63loud_endpoints\x18\x08 \x01(\x0b\x32,.google.monitoring.v3.Service.CloudEndpointsH\x00\x12\x43\n\rcluster_istio\x18\t \x01(\x0b\x32*.google.monitoring.v3.Service.ClusterIstioH\x00\x12:\n\ttelemetry\x18\r \x01(\x0b\x32\'.google.monitoring.v3.Service.Telemetry\x1a\x08\n\x06\x43ustom\x1a\x1e\n\tAppEngine\x12\x11\n\tmodule_id\x18\x01 \x01(\t\x1a!\n\x0e\x43loudEndpoints\x12\x0f\n\x07service\x18\x01 \x01(\t\x1ag\n\x0c\x43lusterIstio\x12\x10\n\x08location\x18\x01 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x19\n\x11service_namespace\x18\x03 \x01(\t\x12\x14\n\x0cservice_name\x18\x04 \x01(\t\x1a"\n\tTelemetry\x12\x15\n\rresource_name\x18\x01 \x01(\tB\x0c\n\nidentifier"\xc4\x02\n\x15ServiceLevelObjective\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x0b \x01(\t\x12L\n\x17service_level_indicator\x18\x03 \x01(\x0b\x32+.google.monitoring.v3.ServiceLevelIndicator\x12\x0c\n\x04goal\x18\x04 \x01(\x01\x12\x33\n\x0erolling_period\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12\x36\n\x0f\x63\x61lendar_period\x18\x06 \x01(\x0e\x32\x1b.google.type.CalendarPeriodH\x00"4\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\x08\n\x04\x46ULL\x10\x02\x12\x0c\n\x08\x45XPLICIT\x10\x01\x42\x08\n\x06period"\xd4\x01\n\x15ServiceLevelIndicator\x12\x33\n\tbasic_sli\x18\x04 \x01(\x0b\x32\x1e.google.monitoring.v3.BasicSliH\x00\x12>\n\rrequest_based\x18\x01 \x01(\x0b\x32%.google.monitoring.v3.RequestBasedSliH\x00\x12>\n\rwindows_based\x18\x02 \x01(\x0b\x32%.google.monitoring.v3.WindowsBasedSliH\x00\x42\x06\n\x04type"\xb6\x02\n\x08\x42\x61sicSli\x12\x0e\n\x06method\x18\x07 \x03(\t\x12\x10\n\x08location\x18\x08 \x03(\t\x12\x0f\n\x07version\x18\t \x03(\t\x12K\n\x0c\x61vailability\x18\x02 
\x01(\x0b\x32\x33.google.monitoring.v3.BasicSli.AvailabilityCriteriaH\x00\x12\x41\n\x07latency\x18\x03 \x01(\x0b\x32..google.monitoring.v3.BasicSli.LatencyCriteriaH\x00\x1a\x16\n\x14\x41vailabilityCriteria\x1a?\n\x0fLatencyCriteria\x12,\n\tthreshold\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x0e\n\x0csli_criteria"!\n\x05Range\x12\x0b\n\x03min\x18\x01 \x01(\x01\x12\x0b\n\x03max\x18\x02 \x01(\x01"\xa1\x01\n\x0fRequestBasedSli\x12\x41\n\x10good_total_ratio\x18\x01 \x01(\x0b\x32%.google.monitoring.v3.TimeSeriesRatioH\x00\x12\x41\n\x10\x64istribution_cut\x18\x03 \x01(\x0b\x32%.google.monitoring.v3.DistributionCutH\x00\x42\x08\n\x06method"h\n\x0fTimeSeriesRatio\x12\x1b\n\x13good_service_filter\x18\x04 \x01(\t\x12\x1a\n\x12\x62\x61\x64_service_filter\x18\x05 \x01(\t\x12\x1c\n\x14total_service_filter\x18\x06 \x01(\t"Z\n\x0f\x44istributionCut\x12\x1b\n\x13\x64istribution_filter\x18\x04 \x01(\t\x12*\n\x05range\x18\x05 \x01(\x0b\x32\x1b.google.monitoring.v3.Range"\x83\x05\n\x0fWindowsBasedSli\x12 \n\x16good_bad_metric_filter\x18\x05 \x01(\tH\x00\x12`\n\x1agood_total_ratio_threshold\x18\x02 \x01(\x0b\x32:.google.monitoring.v3.WindowsBasedSli.PerformanceThresholdH\x00\x12Q\n\x14metric_mean_in_range\x18\x06 \x01(\x0b\x32\x31.google.monitoring.v3.WindowsBasedSli.MetricRangeH\x00\x12P\n\x13metric_sum_in_range\x18\x07 \x01(\x0b\x32\x31.google.monitoring.v3.WindowsBasedSli.MetricRangeH\x00\x12\x30\n\rwindow_period\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x1a\xb0\x01\n\x14PerformanceThreshold\x12<\n\x0bperformance\x18\x01 \x01(\x0b\x32%.google.monitoring.v3.RequestBasedSliH\x00\x12?\n\x15\x62\x61sic_sli_performance\x18\x03 \x01(\x0b\x32\x1e.google.monitoring.v3.BasicSliH\x00\x12\x11\n\tthreshold\x18\x02 \x01(\x01\x42\x06\n\x04type\x1aN\n\x0bMetricRange\x12\x13\n\x0btime_series\x18\x01 \x01(\t\x12*\n\x05range\x18\x04 \x01(\x0b\x32\x1b.google.monitoring.v3.RangeB\x12\n\x10window_criterionB\xae\x01\n\x18\x63om.google.monitoring.v3B\x16ServiceMonitoringProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_type_dot_calendar__period__pb2.DESCRIPTOR, - ], -) - - -_SERVICELEVELOBJECTIVE_VIEW = _descriptor.EnumDescriptor( - name="View", - full_name="google.monitoring.v3.ServiceLevelObjective.View", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="VIEW_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="FULL", index=1, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="EXPLICIT", index=2, number=1, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1073, - serialized_end=1125, -) -_sym_db.RegisterEnumDescriptor(_SERVICELEVELOBJECTIVE_VIEW) - - -_SERVICE_CUSTOM = _descriptor.Descriptor( - name="Custom", - full_name="google.monitoring.v3.Service.Custom", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=578, - serialized_end=586, -) - -_SERVICE_APPENGINE = _descriptor.Descriptor( - 
name="AppEngine", - full_name="google.monitoring.v3.Service.AppEngine", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="module_id", - full_name="google.monitoring.v3.Service.AppEngine.module_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=588, - serialized_end=618, -) - -_SERVICE_CLOUDENDPOINTS = _descriptor.Descriptor( - name="CloudEndpoints", - full_name="google.monitoring.v3.Service.CloudEndpoints", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="service", - full_name="google.monitoring.v3.Service.CloudEndpoints.service", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=620, - serialized_end=653, -) - -_SERVICE_CLUSTERISTIO = _descriptor.Descriptor( - name="ClusterIstio", - full_name="google.monitoring.v3.Service.ClusterIstio", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="location", - full_name="google.monitoring.v3.Service.ClusterIstio.location", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_name", - full_name="google.monitoring.v3.Service.ClusterIstio.cluster_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_namespace", - full_name="google.monitoring.v3.Service.ClusterIstio.service_namespace", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_name", - full_name="google.monitoring.v3.Service.ClusterIstio.service_name", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=655, - serialized_end=758, -) - -_SERVICE_TELEMETRY = _descriptor.Descriptor( - name="Telemetry", - full_name="google.monitoring.v3.Service.Telemetry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="resource_name", - full_name="google.monitoring.v3.Service.Telemetry.resource_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=760, - serialized_end=794, -) - -_SERVICE = _descriptor.Descriptor( - name="Service", - full_name="google.monitoring.v3.Service", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.Service.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.monitoring.v3.Service.display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="custom", - full_name="google.monitoring.v3.Service.custom", - index=2, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="app_engine", - full_name="google.monitoring.v3.Service.app_engine", - index=3, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cloud_endpoints", - full_name="google.monitoring.v3.Service.cloud_endpoints", - index=4, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_istio", - full_name="google.monitoring.v3.Service.cluster_istio", - index=5, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="telemetry", - full_name="google.monitoring.v3.Service.telemetry", - index=6, - number=13, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - 
default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _SERVICE_CUSTOM, - _SERVICE_APPENGINE, - _SERVICE_CLOUDENDPOINTS, - _SERVICE_CLUSTERISTIO, - _SERVICE_TELEMETRY, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="identifier", - full_name="google.monitoring.v3.Service.identifier", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=210, - serialized_end=808, -) - - -_SERVICELEVELOBJECTIVE = _descriptor.Descriptor( - name="ServiceLevelObjective", - full_name="google.monitoring.v3.ServiceLevelObjective", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.ServiceLevelObjective.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.monitoring.v3.ServiceLevelObjective.display_name", - index=1, - number=11, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_level_indicator", - full_name="google.monitoring.v3.ServiceLevelObjective.service_level_indicator", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="goal", - full_name="google.monitoring.v3.ServiceLevelObjective.goal", - index=3, - number=4, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="rolling_period", - full_name="google.monitoring.v3.ServiceLevelObjective.rolling_period", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="calendar_period", - full_name="google.monitoring.v3.ServiceLevelObjective.calendar_period", - index=5, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_SERVICELEVELOBJECTIVE_VIEW], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="period", - 
full_name="google.monitoring.v3.ServiceLevelObjective.period", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=811, - serialized_end=1135, -) - - -_SERVICELEVELINDICATOR = _descriptor.Descriptor( - name="ServiceLevelIndicator", - full_name="google.monitoring.v3.ServiceLevelIndicator", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="basic_sli", - full_name="google.monitoring.v3.ServiceLevelIndicator.basic_sli", - index=0, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="request_based", - full_name="google.monitoring.v3.ServiceLevelIndicator.request_based", - index=1, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="windows_based", - full_name="google.monitoring.v3.ServiceLevelIndicator.windows_based", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type", - full_name="google.monitoring.v3.ServiceLevelIndicator.type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=1138, - serialized_end=1350, -) - - -_BASICSLI_AVAILABILITYCRITERIA = _descriptor.Descriptor( - name="AvailabilityCriteria", - full_name="google.monitoring.v3.BasicSli.AvailabilityCriteria", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1560, - serialized_end=1582, -) - -_BASICSLI_LATENCYCRITERIA = _descriptor.Descriptor( - name="LatencyCriteria", - full_name="google.monitoring.v3.BasicSli.LatencyCriteria", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="threshold", - full_name="google.monitoring.v3.BasicSli.LatencyCriteria.threshold", - index=0, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1584, - serialized_end=1647, -) - -_BASICSLI = _descriptor.Descriptor( - name="BasicSli", - full_name="google.monitoring.v3.BasicSli", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="method", - full_name="google.monitoring.v3.BasicSli.method", - index=0, - number=7, - type=9, - cpp_type=9, - label=3, - 
has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="location", - full_name="google.monitoring.v3.BasicSli.location", - index=1, - number=8, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.monitoring.v3.BasicSli.version", - index=2, - number=9, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="availability", - full_name="google.monitoring.v3.BasicSli.availability", - index=3, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="latency", - full_name="google.monitoring.v3.BasicSli.latency", - index=4, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_BASICSLI_AVAILABILITYCRITERIA, _BASICSLI_LATENCYCRITERIA], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="sli_criteria", - full_name="google.monitoring.v3.BasicSli.sli_criteria", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=1353, - serialized_end=1663, -) - - -_RANGE = _descriptor.Descriptor( - name="Range", - full_name="google.monitoring.v3.Range", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="min", - full_name="google.monitoring.v3.Range.min", - index=0, - number=1, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max", - full_name="google.monitoring.v3.Range.max", - index=1, - number=2, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1665, - serialized_end=1698, -) - - -_REQUESTBASEDSLI = _descriptor.Descriptor( - name="RequestBasedSli", - full_name="google.monitoring.v3.RequestBasedSli", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="good_total_ratio", - 
full_name="google.monitoring.v3.RequestBasedSli.good_total_ratio", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="distribution_cut", - full_name="google.monitoring.v3.RequestBasedSli.distribution_cut", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="method", - full_name="google.monitoring.v3.RequestBasedSli.method", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=1701, - serialized_end=1862, -) - - -_TIMESERIESRATIO = _descriptor.Descriptor( - name="TimeSeriesRatio", - full_name="google.monitoring.v3.TimeSeriesRatio", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="good_service_filter", - full_name="google.monitoring.v3.TimeSeriesRatio.good_service_filter", - index=0, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bad_service_filter", - full_name="google.monitoring.v3.TimeSeriesRatio.bad_service_filter", - index=1, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="total_service_filter", - full_name="google.monitoring.v3.TimeSeriesRatio.total_service_filter", - index=2, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1864, - serialized_end=1968, -) - - -_DISTRIBUTIONCUT = _descriptor.Descriptor( - name="DistributionCut", - full_name="google.monitoring.v3.DistributionCut", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="distribution_filter", - full_name="google.monitoring.v3.DistributionCut.distribution_filter", - index=0, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="range", - full_name="google.monitoring.v3.DistributionCut.range", - index=1, - number=5, - type=11, - cpp_type=10, - label=1, - 
has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1970, - serialized_end=2060, -) - - -_WINDOWSBASEDSLI_PERFORMANCETHRESHOLD = _descriptor.Descriptor( - name="PerformanceThreshold", - full_name="google.monitoring.v3.WindowsBasedSli.PerformanceThreshold", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="performance", - full_name="google.monitoring.v3.WindowsBasedSli.PerformanceThreshold.performance", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="basic_sli_performance", - full_name="google.monitoring.v3.WindowsBasedSli.PerformanceThreshold.basic_sli_performance", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="threshold", - full_name="google.monitoring.v3.WindowsBasedSli.PerformanceThreshold.threshold", - index=2, - number=2, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type", - full_name="google.monitoring.v3.WindowsBasedSli.PerformanceThreshold.type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=2430, - serialized_end=2606, -) - -_WINDOWSBASEDSLI_METRICRANGE = _descriptor.Descriptor( - name="MetricRange", - full_name="google.monitoring.v3.WindowsBasedSli.MetricRange", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="time_series", - full_name="google.monitoring.v3.WindowsBasedSli.MetricRange.time_series", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="range", - full_name="google.monitoring.v3.WindowsBasedSli.MetricRange.range", - index=1, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2608, - serialized_end=2686, -) - -_WINDOWSBASEDSLI = 
_descriptor.Descriptor( - name="WindowsBasedSli", - full_name="google.monitoring.v3.WindowsBasedSli", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="good_bad_metric_filter", - full_name="google.monitoring.v3.WindowsBasedSli.good_bad_metric_filter", - index=0, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="good_total_ratio_threshold", - full_name="google.monitoring.v3.WindowsBasedSli.good_total_ratio_threshold", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric_mean_in_range", - full_name="google.monitoring.v3.WindowsBasedSli.metric_mean_in_range", - index=2, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric_sum_in_range", - full_name="google.monitoring.v3.WindowsBasedSli.metric_sum_in_range", - index=3, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="window_period", - full_name="google.monitoring.v3.WindowsBasedSli.window_period", - index=4, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_WINDOWSBASEDSLI_PERFORMANCETHRESHOLD, _WINDOWSBASEDSLI_METRICRANGE], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="window_criterion", - full_name="google.monitoring.v3.WindowsBasedSli.window_criterion", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=2063, - serialized_end=2706, -) - -_SERVICE_CUSTOM.containing_type = _SERVICE -_SERVICE_APPENGINE.containing_type = _SERVICE -_SERVICE_CLOUDENDPOINTS.containing_type = _SERVICE -_SERVICE_CLUSTERISTIO.containing_type = _SERVICE -_SERVICE_TELEMETRY.containing_type = _SERVICE -_SERVICE.fields_by_name["custom"].message_type = _SERVICE_CUSTOM -_SERVICE.fields_by_name["app_engine"].message_type = _SERVICE_APPENGINE -_SERVICE.fields_by_name["cloud_endpoints"].message_type = _SERVICE_CLOUDENDPOINTS -_SERVICE.fields_by_name["cluster_istio"].message_type = _SERVICE_CLUSTERISTIO -_SERVICE.fields_by_name["telemetry"].message_type = _SERVICE_TELEMETRY -_SERVICE.oneofs_by_name["identifier"].fields.append(_SERVICE.fields_by_name["custom"]) -_SERVICE.fields_by_name["custom"].containing_oneof = _SERVICE.oneofs_by_name[ - "identifier" -] -_SERVICE.oneofs_by_name["identifier"].fields.append( - 
_SERVICE.fields_by_name["app_engine"] -) -_SERVICE.fields_by_name["app_engine"].containing_oneof = _SERVICE.oneofs_by_name[ - "identifier" -] -_SERVICE.oneofs_by_name["identifier"].fields.append( - _SERVICE.fields_by_name["cloud_endpoints"] -) -_SERVICE.fields_by_name["cloud_endpoints"].containing_oneof = _SERVICE.oneofs_by_name[ - "identifier" -] -_SERVICE.oneofs_by_name["identifier"].fields.append( - _SERVICE.fields_by_name["cluster_istio"] -) -_SERVICE.fields_by_name["cluster_istio"].containing_oneof = _SERVICE.oneofs_by_name[ - "identifier" -] -_SERVICELEVELOBJECTIVE.fields_by_name[ - "service_level_indicator" -].message_type = _SERVICELEVELINDICATOR -_SERVICELEVELOBJECTIVE.fields_by_name[ - "rolling_period" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_SERVICELEVELOBJECTIVE.fields_by_name[ - "calendar_period" -].enum_type = google_dot_type_dot_calendar__period__pb2._CALENDARPERIOD -_SERVICELEVELOBJECTIVE_VIEW.containing_type = _SERVICELEVELOBJECTIVE -_SERVICELEVELOBJECTIVE.oneofs_by_name["period"].fields.append( - _SERVICELEVELOBJECTIVE.fields_by_name["rolling_period"] -) -_SERVICELEVELOBJECTIVE.fields_by_name[ - "rolling_period" -].containing_oneof = _SERVICELEVELOBJECTIVE.oneofs_by_name["period"] -_SERVICELEVELOBJECTIVE.oneofs_by_name["period"].fields.append( - _SERVICELEVELOBJECTIVE.fields_by_name["calendar_period"] -) -_SERVICELEVELOBJECTIVE.fields_by_name[ - "calendar_period" -].containing_oneof = _SERVICELEVELOBJECTIVE.oneofs_by_name["period"] -_SERVICELEVELINDICATOR.fields_by_name["basic_sli"].message_type = _BASICSLI -_SERVICELEVELINDICATOR.fields_by_name["request_based"].message_type = _REQUESTBASEDSLI -_SERVICELEVELINDICATOR.fields_by_name["windows_based"].message_type = _WINDOWSBASEDSLI -_SERVICELEVELINDICATOR.oneofs_by_name["type"].fields.append( - _SERVICELEVELINDICATOR.fields_by_name["basic_sli"] -) -_SERVICELEVELINDICATOR.fields_by_name[ - "basic_sli" -].containing_oneof = _SERVICELEVELINDICATOR.oneofs_by_name["type"] -_SERVICELEVELINDICATOR.oneofs_by_name["type"].fields.append( - _SERVICELEVELINDICATOR.fields_by_name["request_based"] -) -_SERVICELEVELINDICATOR.fields_by_name[ - "request_based" -].containing_oneof = _SERVICELEVELINDICATOR.oneofs_by_name["type"] -_SERVICELEVELINDICATOR.oneofs_by_name["type"].fields.append( - _SERVICELEVELINDICATOR.fields_by_name["windows_based"] -) -_SERVICELEVELINDICATOR.fields_by_name[ - "windows_based" -].containing_oneof = _SERVICELEVELINDICATOR.oneofs_by_name["type"] -_BASICSLI_AVAILABILITYCRITERIA.containing_type = _BASICSLI -_BASICSLI_LATENCYCRITERIA.fields_by_name[ - "threshold" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_BASICSLI_LATENCYCRITERIA.containing_type = _BASICSLI -_BASICSLI.fields_by_name["availability"].message_type = _BASICSLI_AVAILABILITYCRITERIA -_BASICSLI.fields_by_name["latency"].message_type = _BASICSLI_LATENCYCRITERIA -_BASICSLI.oneofs_by_name["sli_criteria"].fields.append( - _BASICSLI.fields_by_name["availability"] -) -_BASICSLI.fields_by_name["availability"].containing_oneof = _BASICSLI.oneofs_by_name[ - "sli_criteria" -] -_BASICSLI.oneofs_by_name["sli_criteria"].fields.append( - _BASICSLI.fields_by_name["latency"] -) -_BASICSLI.fields_by_name["latency"].containing_oneof = _BASICSLI.oneofs_by_name[ - "sli_criteria" -] -_REQUESTBASEDSLI.fields_by_name["good_total_ratio"].message_type = _TIMESERIESRATIO -_REQUESTBASEDSLI.fields_by_name["distribution_cut"].message_type = _DISTRIBUTIONCUT -_REQUESTBASEDSLI.oneofs_by_name["method"].fields.append( - 
_REQUESTBASEDSLI.fields_by_name["good_total_ratio"] -) -_REQUESTBASEDSLI.fields_by_name[ - "good_total_ratio" -].containing_oneof = _REQUESTBASEDSLI.oneofs_by_name["method"] -_REQUESTBASEDSLI.oneofs_by_name["method"].fields.append( - _REQUESTBASEDSLI.fields_by_name["distribution_cut"] -) -_REQUESTBASEDSLI.fields_by_name[ - "distribution_cut" -].containing_oneof = _REQUESTBASEDSLI.oneofs_by_name["method"] -_DISTRIBUTIONCUT.fields_by_name["range"].message_type = _RANGE -_WINDOWSBASEDSLI_PERFORMANCETHRESHOLD.fields_by_name[ - "performance" -].message_type = _REQUESTBASEDSLI -_WINDOWSBASEDSLI_PERFORMANCETHRESHOLD.fields_by_name[ - "basic_sli_performance" -].message_type = _BASICSLI -_WINDOWSBASEDSLI_PERFORMANCETHRESHOLD.containing_type = _WINDOWSBASEDSLI -_WINDOWSBASEDSLI_PERFORMANCETHRESHOLD.oneofs_by_name["type"].fields.append( - _WINDOWSBASEDSLI_PERFORMANCETHRESHOLD.fields_by_name["performance"] -) -_WINDOWSBASEDSLI_PERFORMANCETHRESHOLD.fields_by_name[ - "performance" -].containing_oneof = _WINDOWSBASEDSLI_PERFORMANCETHRESHOLD.oneofs_by_name["type"] -_WINDOWSBASEDSLI_PERFORMANCETHRESHOLD.oneofs_by_name["type"].fields.append( - _WINDOWSBASEDSLI_PERFORMANCETHRESHOLD.fields_by_name["basic_sli_performance"] -) -_WINDOWSBASEDSLI_PERFORMANCETHRESHOLD.fields_by_name[ - "basic_sli_performance" -].containing_oneof = _WINDOWSBASEDSLI_PERFORMANCETHRESHOLD.oneofs_by_name["type"] -_WINDOWSBASEDSLI_METRICRANGE.fields_by_name["range"].message_type = _RANGE -_WINDOWSBASEDSLI_METRICRANGE.containing_type = _WINDOWSBASEDSLI -_WINDOWSBASEDSLI.fields_by_name[ - "good_total_ratio_threshold" -].message_type = _WINDOWSBASEDSLI_PERFORMANCETHRESHOLD -_WINDOWSBASEDSLI.fields_by_name[ - "metric_mean_in_range" -].message_type = _WINDOWSBASEDSLI_METRICRANGE -_WINDOWSBASEDSLI.fields_by_name[ - "metric_sum_in_range" -].message_type = _WINDOWSBASEDSLI_METRICRANGE -_WINDOWSBASEDSLI.fields_by_name[ - "window_period" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_WINDOWSBASEDSLI.oneofs_by_name["window_criterion"].fields.append( - _WINDOWSBASEDSLI.fields_by_name["good_bad_metric_filter"] -) -_WINDOWSBASEDSLI.fields_by_name[ - "good_bad_metric_filter" -].containing_oneof = _WINDOWSBASEDSLI.oneofs_by_name["window_criterion"] -_WINDOWSBASEDSLI.oneofs_by_name["window_criterion"].fields.append( - _WINDOWSBASEDSLI.fields_by_name["good_total_ratio_threshold"] -) -_WINDOWSBASEDSLI.fields_by_name[ - "good_total_ratio_threshold" -].containing_oneof = _WINDOWSBASEDSLI.oneofs_by_name["window_criterion"] -_WINDOWSBASEDSLI.oneofs_by_name["window_criterion"].fields.append( - _WINDOWSBASEDSLI.fields_by_name["metric_mean_in_range"] -) -_WINDOWSBASEDSLI.fields_by_name[ - "metric_mean_in_range" -].containing_oneof = _WINDOWSBASEDSLI.oneofs_by_name["window_criterion"] -_WINDOWSBASEDSLI.oneofs_by_name["window_criterion"].fields.append( - _WINDOWSBASEDSLI.fields_by_name["metric_sum_in_range"] -) -_WINDOWSBASEDSLI.fields_by_name[ - "metric_sum_in_range" -].containing_oneof = _WINDOWSBASEDSLI.oneofs_by_name["window_criterion"] -DESCRIPTOR.message_types_by_name["Service"] = _SERVICE -DESCRIPTOR.message_types_by_name["ServiceLevelObjective"] = _SERVICELEVELOBJECTIVE -DESCRIPTOR.message_types_by_name["ServiceLevelIndicator"] = _SERVICELEVELINDICATOR -DESCRIPTOR.message_types_by_name["BasicSli"] = _BASICSLI -DESCRIPTOR.message_types_by_name["Range"] = _RANGE -DESCRIPTOR.message_types_by_name["RequestBasedSli"] = _REQUESTBASEDSLI -DESCRIPTOR.message_types_by_name["TimeSeriesRatio"] = _TIMESERIESRATIO 
-DESCRIPTOR.message_types_by_name["DistributionCut"] = _DISTRIBUTIONCUT -DESCRIPTOR.message_types_by_name["WindowsBasedSli"] = _WINDOWSBASEDSLI -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Service = _reflection.GeneratedProtocolMessageType( - "Service", - (_message.Message,), - dict( - Custom=_reflection.GeneratedProtocolMessageType( - "Custom", - (_message.Message,), - dict( - DESCRIPTOR=_SERVICE_CUSTOM, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""Custom view of service telemetry. Currently a place-holder - pending final design. - - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.Service.Custom) - ), - ), - AppEngine=_reflection.GeneratedProtocolMessageType( - "AppEngine", - (_message.Message,), - dict( - DESCRIPTOR=_SERVICE_APPENGINE, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""App Engine service. Learn more at - https://cloud.google.com/appengine. - - - Attributes: - module_id: - The ID of the App Engine module underlying this service. - Corresponds to the ``module_id`` resource label in the - ``gae_app`` monitored resource: https://cloud.google.com/monit - oring/api/resources#tag\_gae\_app - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.Service.AppEngine) - ), - ), - CloudEndpoints=_reflection.GeneratedProtocolMessageType( - "CloudEndpoints", - (_message.Message,), - dict( - DESCRIPTOR=_SERVICE_CLOUDENDPOINTS, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""Cloud Endpoints service. Learn more at - https://cloud.google.com/endpoints. - - - Attributes: - service: - The name of the Cloud Endpoints service underlying this - service. Corresponds to the ``service`` resource label in the - ``api`` monitored resource: - https://cloud.google.com/monitoring/api/resources#tag\_api - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.Service.CloudEndpoints) - ), - ), - ClusterIstio=_reflection.GeneratedProtocolMessageType( - "ClusterIstio", - (_message.Message,), - dict( - DESCRIPTOR=_SERVICE_CLUSTERISTIO, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""Istio service. Learn more at http://istio.io. - - - Attributes: - location: - The location of the Kubernetes cluster in which this Istio - service is defined. Corresponds to the ``location`` resource - label in ``k8s_cluster`` resources. - cluster_name: - The name of the Kubernetes cluster in which this Istio service - is defined. Corresponds to the ``cluster_name`` resource label - in ``k8s_cluster`` resources. - service_namespace: - The namespace of the Istio service underlying this service. - Corresponds to the ``destination_service_namespace`` metric - label in Istio metrics. - service_name: - The name of the Istio service underlying this service. - Corresponds to the ``destination_service_name`` metric label - in Istio metrics. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.Service.ClusterIstio) - ), - ), - Telemetry=_reflection.GeneratedProtocolMessageType( - "Telemetry", - (_message.Message,), - dict( - DESCRIPTOR=_SERVICE_TELEMETRY, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""Configuration for how to query telemetry on a Service. - - - Attributes: - resource_name: - The full name of the resource that defines this service. - Formatted as described in - https://cloud.google.com/apis/design/resource\_names. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.Service.Telemetry) - ), - ), - DESCRIPTOR=_SERVICE, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""A ``Service`` is a discrete, autonomous, and - network-accessible unit, designed to solve an individual concern - (`Wikipedia `__). In - Stackdriver Monitoring, a ``Service`` acts as the root resource under - which operational aspects of the service are accessible. - - - Attributes: - name: - Resource name for this Service. Of the form - ``projects/{project_id}/services/{service_id}``. - display_name: - Name used for UI elements listing this Service. - identifier: - REQUIRED. Service-identifying atoms specifying the underlying - service. - custom: - Custom service type. - app_engine: - Type used for App Engine services. - cloud_endpoints: - Type used for Cloud Endpoints services. - cluster_istio: - Type used for Istio services that live in a Kubernetes - cluster. - telemetry: - Configuration for how to query telemetry on a Service. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.Service) - ), -) -_sym_db.RegisterMessage(Service) -_sym_db.RegisterMessage(Service.Custom) -_sym_db.RegisterMessage(Service.AppEngine) -_sym_db.RegisterMessage(Service.CloudEndpoints) -_sym_db.RegisterMessage(Service.ClusterIstio) -_sym_db.RegisterMessage(Service.Telemetry) - -ServiceLevelObjective = _reflection.GeneratedProtocolMessageType( - "ServiceLevelObjective", - (_message.Message,), - dict( - DESCRIPTOR=_SERVICELEVELOBJECTIVE, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""A Service-Level Objective (SLO) describes a level of - desired good service. It consists of a service-level indicator (SLI), a - performance goal, and a period over which the objective is to be - evaluated against that goal. The SLO can use SLIs defined in a number of - different manners. Typical SLOs might include "99% of requests in each - rolling week have latency below 200 milliseconds" or "99.5% of requests - in each calendar month return successfully." - - - Attributes: - name: - Resource name for this ``ServiceLevelObjective``. Of the form - ``projects/{project_id}/services/{service_id}/serviceLevelObje - ctives/{slo_name}``. - display_name: - Name used for UI elements listing this SLO. - service_level_indicator: - The definition of good service, used to measure and calculate - the quality of the ``Service``'s performance with respect to a - single aspect of service quality. - goal: - The fraction of service that must be good in order for this - objective to be met. ``0 < goal <= 0.999``. - period: - The time period over which the objective will be evaluated. - rolling_period: - A rolling time period, semantically "in the past - ````". Must be an integer multiple of 1 day no - larger than 30 days. - calendar_period: - A calendar period, semantically "since the start of the - current ````". At this time, only ``DAY``, - ``WEEK``, ``FORTNIGHT``, and ``MONTH`` are supported. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ServiceLevelObjective) - ), -) -_sym_db.RegisterMessage(ServiceLevelObjective) - -ServiceLevelIndicator = _reflection.GeneratedProtocolMessageType( - "ServiceLevelIndicator", - (_message.Message,), - dict( - DESCRIPTOR=_SERVICELEVELINDICATOR, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""A Service-Level Indicator (SLI) describes the - "performance" of a service. For some services, the SLI is well-defined. 
- In such cases, the SLI can be described easily by referencing the - well-known SLI and providing the needed parameters. Alternatively, a - "custom" SLI can be defined with a query to the underlying metric store. - An SLI is defined to be ``good_service / total_service`` over any - queried time interval. The value of performance always falls into the - range ``0 <= performance <= 1``. A custom SLI describes how to compute - this ratio, whether this is by dividing values from a pair of time - series, cutting a ``Distribution`` into good and bad counts, or counting - time windows in which the service complies with a criterion. For - separation of concerns, a single Service-Level Indicator measures - performance for only one aspect of service quality, such as fraction of - successful queries or fast-enough queries. - - - Attributes: - type: - Service level indicators can be grouped by whether the "unit" - of service being measured is based on counts of good requests - or on counts of good time windows - basic_sli: - Basic SLI on a well-known service type. - request_based: - Request-based SLIs - windows_based: - Windows-based SLIs - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ServiceLevelIndicator) - ), -) -_sym_db.RegisterMessage(ServiceLevelIndicator) - -BasicSli = _reflection.GeneratedProtocolMessageType( - "BasicSli", - (_message.Message,), - dict( - AvailabilityCriteria=_reflection.GeneratedProtocolMessageType( - "AvailabilityCriteria", - (_message.Message,), - dict( - DESCRIPTOR=_BASICSLI_AVAILABILITYCRITERIA, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""Future parameters for the availability SLI. - - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.BasicSli.AvailabilityCriteria) - ), - ), - LatencyCriteria=_reflection.GeneratedProtocolMessageType( - "LatencyCriteria", - (_message.Message,), - dict( - DESCRIPTOR=_BASICSLI_LATENCYCRITERIA, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""Parameters for a latency threshold SLI. - - - Attributes: - threshold: - Good service is defined to be the count of requests made to - this service that return in no more than ``threshold``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.BasicSli.LatencyCriteria) - ), - ), - DESCRIPTOR=_BASICSLI, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""An SLI measuring performance on a well-known service type. - Performance will be computed on the basis of pre-defined metrics. The - type of the ``service_resource`` determines the metrics to use and the - ``service_resource.labels`` and ``metric_labels`` are used to construct - a monitoring filter to filter that metric down to just the data relevant - to this service. - - - Attributes: - method: - OPTIONAL: The set of RPCs to which this SLI is relevant. - Telemetry from other methods will not be used to calculate - performance for this SLI. If omitted, this SLI applies to all - the Service's methods. For service types that don't support - breaking down by method, setting this field will result in an - error. - location: - OPTIONAL: The set of locations to which this SLI is relevant. - Telemetry from other locations will not be used to calculate - performance for this SLI. If omitted, this SLI applies to all - locations in which the Service has activity. For service types - that don't support breaking down by location, setting this - field will result in an error. 
- version: - OPTIONAL: The set of API versions to which this SLI is - relevant. Telemetry from other API versions will not be used - to calculate performance for this SLI. If omitted, this SLI - applies to all API versions. For service types that don't - support breaking down by version, setting this field will - result in an error. - sli_criteria: - This SLI can be evaluated on the basis of availability or - latency. - availability: - Good service is defined to be the count of requests made to - this service that return successfully. - latency: - Good service is defined to be the count of requests made to - this service that are fast enough with respect to - ``latency.threshold``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.BasicSli) - ), -) -_sym_db.RegisterMessage(BasicSli) -_sym_db.RegisterMessage(BasicSli.AvailabilityCriteria) -_sym_db.RegisterMessage(BasicSli.LatencyCriteria) - -Range = _reflection.GeneratedProtocolMessageType( - "Range", - (_message.Message,), - dict( - DESCRIPTOR=_RANGE, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""Range of numerical values, inclusive of ``min`` and - exclusive of ``max``. If the open range "< range.max" is desired, set - ``range.min = -infinity``. If the open range ">= range.min" is desired, - set ``range.max = infinity``. - - - Attributes: - min: - Range minimum. - max: - Range maximum. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.Range) - ), -) -_sym_db.RegisterMessage(Range) - -RequestBasedSli = _reflection.GeneratedProtocolMessageType( - "RequestBasedSli", - (_message.Message,), - dict( - DESCRIPTOR=_REQUESTBASEDSLI, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""Service Level Indicators for which atomic units of service - are counted directly. - - - Attributes: - method: - The means to compute a ratio of ``good_service`` to - ``total_service``. - good_total_ratio: - \ ``good_total_ratio`` is used when the ratio of - ``good_service`` to ``total_service`` is computed from two - ``TimeSeries``. - distribution_cut: - \ ``distribution_cut`` is used when ``good_service`` is a - count of values aggregated in a ``Distribution`` that fall - into a good range. The ``total_service`` is the total count of - all values aggregated in the ``Distribution``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.RequestBasedSli) - ), -) -_sym_db.RegisterMessage(RequestBasedSli) - -TimeSeriesRatio = _reflection.GeneratedProtocolMessageType( - "TimeSeriesRatio", - (_message.Message,), - dict( - DESCRIPTOR=_TIMESERIESRATIO, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""A ``TimeSeriesRatio`` specifies two ``TimeSeries`` to use - for computing the ``good_service / total_service`` ratio. The specified - ``TimeSeries`` must have ``ValueType = DOUBLE`` or ``ValueType = INT64`` - and must have ``MetricKind = DELTA`` or ``MetricKind = CUMULATIVE``. The - ``TimeSeriesRatio`` must specify exactly two of good, bad, and total, - and the relationship ``good_service + bad_service = total_service`` will - be assumed. - - - Attributes: - good_service_filter: - A `monitoring filter - `__ - specifying a ``TimeSeries`` quantifying good service provided. - Must have ``ValueType = DOUBLE`` or ``ValueType = INT64`` and - must have ``MetricKind = DELTA`` or ``MetricKind = - CUMULATIVE``. 
- bad_service_filter: - A `monitoring filter - `__ - specifying a ``TimeSeries`` quantifying bad service, either - demanded service that was not provided or demanded service - that was of inadequate quality. Must have ``ValueType = - DOUBLE`` or ``ValueType = INT64`` and must have ``MetricKind = - DELTA`` or ``MetricKind = CUMULATIVE``. - total_service_filter: - A `monitoring filter - `__ - specifying a ``TimeSeries`` quantifying total demanded - service. Must have ``ValueType = DOUBLE`` or ``ValueType = - INT64`` and must have ``MetricKind = DELTA`` or ``MetricKind = - CUMULATIVE``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.TimeSeriesRatio) - ), -) -_sym_db.RegisterMessage(TimeSeriesRatio) - -DistributionCut = _reflection.GeneratedProtocolMessageType( - "DistributionCut", - (_message.Message,), - dict( - DESCRIPTOR=_DISTRIBUTIONCUT, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""A ``DistributionCut`` defines a ``TimeSeries`` and - thresholds used for measuring good service and total service. The - ``TimeSeries`` must have ``ValueType = DISTRIBUTION`` and - ``MetricKind = DELTA`` or ``MetricKind = CUMULATIVE``. The computed - ``good_service`` will be the count of values x in the ``Distribution`` - such that ``range.min <= x < range.max``. - - - Attributes: - distribution_filter: - A `monitoring filter - `__ - specifying a ``TimeSeries`` aggregating values. Must have - ``ValueType = DISTRIBUTION`` and ``MetricKind = DELTA`` or - ``MetricKind = CUMULATIVE``. - range: - Range of values considered "good." For a one-sided range, set - one bound to an infinite value. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.DistributionCut) - ), -) -_sym_db.RegisterMessage(DistributionCut) - -WindowsBasedSli = _reflection.GeneratedProtocolMessageType( - "WindowsBasedSli", - (_message.Message,), - dict( - PerformanceThreshold=_reflection.GeneratedProtocolMessageType( - "PerformanceThreshold", - (_message.Message,), - dict( - DESCRIPTOR=_WINDOWSBASEDSLI_PERFORMANCETHRESHOLD, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""A ``PerformanceThreshold`` is used when each window is - good when that window has a sufficiently high ``performance``. - - - Attributes: - type: - The means, either a request-based SLI or a basic SLI, by which - to compute performance over a window. - performance: - \ ``RequestBasedSli`` to evaluate to judge window quality. - basic_sli_performance: - \ ``BasicSli`` to evaluate to judge window quality. - threshold: - If window ``performance >= threshold``, the window is counted - as good. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.WindowsBasedSli.PerformanceThreshold) - ), - ), - MetricRange=_reflection.GeneratedProtocolMessageType( - "MetricRange", - (_message.Message,), - dict( - DESCRIPTOR=_WINDOWSBASEDSLI_METRICRANGE, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""A ``MetricRange`` is used when each window is good when - the value x of a single ``TimeSeries`` satisfies - ``range.min <= x < range.max``. The provided ``TimeSeries`` must have - ``ValueType = INT64`` or ``ValueType = DOUBLE`` and - ``MetricKind = GAUGE``. - - - Attributes: - time_series: - A `monitoring filter - `__ - specifying the ``TimeSeries`` to use for evaluating window - quality. - range: - Range of values considered "good." For a one-sided range, set - one bound to an infinite value. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.WindowsBasedSli.MetricRange) - ), - ), - DESCRIPTOR=_WINDOWSBASEDSLI, - __module__="google.cloud.monitoring_v3.proto.service_pb2", - __doc__="""A ``WindowsBasedSli`` defines ``good_service`` as the - count of time windows for which the provided service was of good - quality. Criteria for determining if service was good are embedded in - the ``window_criterion``. - - - Attributes: - window_criterion: - The criterion to use for evaluating window goodness. - good_bad_metric_filter: - A `monitoring filter - `__ - specifying a ``TimeSeries`` with ``ValueType = BOOL``. The - window is good if any ``true`` values appear in the window. - good_total_ratio_threshold: - A window is good if its ``performance`` is high enough. - metric_mean_in_range: - A window is good if the metric's value is in a good range, - averaged across returned streams. - metric_sum_in_range: - A window is good if the metric's value is in a good range, - summed across returned streams. - window_period: - Duration over which window quality is evaluated. Must be an - integer fraction of a day and at least ``60s``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.WindowsBasedSli) - ), -) -_sym_db.RegisterMessage(WindowsBasedSli) -_sym_db.RegisterMessage(WindowsBasedSli.PerformanceThreshold) -_sym_db.RegisterMessage(WindowsBasedSli.MetricRange) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/service_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/service_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/service_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/monitoring/google/cloud/monitoring_v3/proto/service_service.proto b/monitoring/google/cloud/monitoring_v3/proto/service_service.proto deleted file mode 100644 index e0a35833e5b8..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/service_service.proto +++ /dev/null @@ -1,285 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -syntax = "proto3"; - -package google.monitoring.v3; - -import "google/api/annotations.proto"; -import "google/api/monitored_resource.proto"; -import "google/monitoring/v3/service.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/api/client.proto"; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "ServiceMonitoringServiceProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// The Stackdriver Monitoring Service-Oriented Monitoring API has endpoints for -// managing and querying aspects of a workspace's services. These include the -// `Service`'s monitored resources, its Service-Level Objectives, and a taxonomy -// of categorized Health Metrics. -service ServiceMonitoringService { - option (google.api.default_host) = "monitoring.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/monitoring," - "https://www.googleapis.com/auth/monitoring.read"; - - // Create a `Service`. - rpc CreateService(CreateServiceRequest) returns (Service) { - option (google.api.http) = { - post: "/v3/{parent=*/*}/services" - body: "service" - }; - } - - // Get the named `Service`. - rpc GetService(GetServiceRequest) returns (Service) { - option (google.api.http) = { - get: "/v3/{name=*/*/services/*}" - }; - } - - // List `Service`s for this workspace. - rpc ListServices(ListServicesRequest) returns (ListServicesResponse) { - option (google.api.http) = { - get: "/v3/{parent=*/*}/services" - }; - } - - // Update this `Service`. - rpc UpdateService(UpdateServiceRequest) returns (Service) { - option (google.api.http) = { - patch: "/v3/{service.name=*/*/services/*}" - body: "service" - }; - } - - // Soft delete this `Service`. - rpc DeleteService(DeleteServiceRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v3/{name=*/*/services/*}" - }; - } - - // Create a `ServiceLevelObjective` for the given `Service`. - rpc CreateServiceLevelObjective(CreateServiceLevelObjectiveRequest) returns (ServiceLevelObjective) { - option (google.api.http) = { - post: "/v3/{parent=*/*/services/*}/serviceLevelObjectives" - body: "service_level_objective" - }; - } - - // Get a `ServiceLevelObjective` by name. - rpc GetServiceLevelObjective(GetServiceLevelObjectiveRequest) returns (ServiceLevelObjective) { - option (google.api.http) = { - get: "/v3/{name=*/*/services/*/serviceLevelObjectives/*}" - }; - } - - // List the `ServiceLevelObjective`s for the given `Service`. - rpc ListServiceLevelObjectives(ListServiceLevelObjectivesRequest) returns (ListServiceLevelObjectivesResponse) { - option (google.api.http) = { - get: "/v3/{parent=*/*/services/*}/serviceLevelObjectives" - }; - } - - // Update the given `ServiceLevelObjective`. - rpc UpdateServiceLevelObjective(UpdateServiceLevelObjectiveRequest) returns (ServiceLevelObjective) { - option (google.api.http) = { - patch: "/v3/{service_level_objective.name=*/*/services/*/serviceLevelObjectives/*}" - body: "service_level_objective" - }; - } - - // Delete the given `ServiceLevelObjective`. 
- rpc DeleteServiceLevelObjective(DeleteServiceLevelObjectiveRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v3/{name=*/*/services/*/serviceLevelObjectives/*}" - }; - } -} - -// The `CreateService` request. -message CreateServiceRequest { - // Resource name of the parent workspace. - // Of the form `projects/{project_id}`. - string parent = 1; - - // Optional. The Service id to use for this Service. If omitted, an id will be - // generated instead. Must match the pattern [a-z0-9\-]+ - string service_id = 3; - - // The `Service` to create. - Service service = 2; -} - -// The `GetService` request. -message GetServiceRequest { - // Resource name of the `Service`. - // Of the form `projects/{project_id}/services/{service_id}`. - string name = 1; -} - -// The `ListServices` request. -message ListServicesRequest { - // Resource name of the parent `Workspace`. - // Of the form `projects/{project_id}`. - string parent = 1; - - // A filter specifying what `Service`s to return. The filter currently - // supports the following fields: - // - // - `identifier_case` - // - `app_engine.module_id` - // - `cloud_endpoints.service` - // - `cluster_istio.location` - // - `cluster_istio.cluster_name` - // - `cluster_istio.service_namespace` - // - `cluster_istio.service_name` - // - // `identifier_case` refers to which option in the identifier oneof is - // populated. For example, the filter `identifier_case = "CUSTOM"` would match - // all services with a value for the `custom` field. Valid options are - // "CUSTOM", "APP_ENGINE", "CLOUD_ENDPOINTS", and "CLUSTER_ISTIO". - string filter = 2; - - // A non-negative number that is the maximum number of results to return. - // When 0, use default page size. - int32 page_size = 3; - - // If this field is not empty then it must contain the `nextPageToken` value - // returned by a previous call to this method. Using this field causes the - // method to return additional results from the previous method call. - string page_token = 4; -} - -// The `ListServices` response. -message ListServicesResponse { - // The `Service`s matching the specified filter. - repeated Service services = 1; - - // If there are more results than have been returned, then this field is set - // to a non-empty value. To see the additional results, - // use that value as `pageToken` in the next call to this method. - string next_page_token = 2; -} - -// The `UpdateService` request. -message UpdateServiceRequest { - // The `Service` to draw updates from. - // The given `name` specifies the resource to update. - Service service = 1; - - // A set of field paths defining which fields to use for the update. - google.protobuf.FieldMask update_mask = 2; -} - -// The `DeleteService` request. -message DeleteServiceRequest { - // Resource name of the `Service` to delete. - // Of the form `projects/{project_id}/service/{service_id}`. - string name = 1; -} - -// The `CreateServiceLevelObjective` request. -message CreateServiceLevelObjectiveRequest { - // Resource name of the parent `Service`. - // Of the form `projects/{project_id}/services/{service_id}`. - string parent = 1; - - // Optional. The ServiceLevelObjective id to use for this - // ServiceLevelObjective. If omitted, an id will be generated instead. Must - // match the pattern [a-z0-9\-]+ - string service_level_objective_id = 3; - - // The `ServiceLevelObjective` to create. - // The provided `name` will be respected if no `ServiceLevelObjective` exists - // with this name. 
- ServiceLevelObjective service_level_objective = 2; -} - -// The `GetServiceLevelObjective` request. -message GetServiceLevelObjectiveRequest { - // Resource name of the `ServiceLevelObjective` to get. - // Of the form - // `projects/{project_id}/services/{service_id}/serviceLevelObjectives/{slo_name}`. - string name = 1; - - // View of the `ServiceLevelObjective` to return. If `DEFAULT`, return the - // `ServiceLevelObjective` as originally defined. If `EXPLICIT` and the - // `ServiceLevelObjective` is defined in terms of a `BasicSli`, replace the - // `BasicSli` with a `RequestBasedSli` spelling out how the SLI is computed. - ServiceLevelObjective.View view = 2; -} - -// The `ListServiceLevelObjectives` request. -message ListServiceLevelObjectivesRequest { - // Resource name of the parent `Service`. - // Of the form `projects/{project_id}/services/{service_id}`. - string parent = 1; - - // A filter specifying what `ServiceLevelObjective`s to return. - string filter = 2; - - // A non-negative number that is the maximum number of results to return. - // When 0, use default page size. - int32 page_size = 3; - - // If this field is not empty then it must contain the `nextPageToken` value - // returned by a previous call to this method. Using this field causes the - // method to return additional results from the previous method call. - string page_token = 4; - - // View of the `ServiceLevelObjective`s to return. If `DEFAULT`, return each - // `ServiceLevelObjective` as originally defined. If `EXPLICIT` and the - // `ServiceLevelObjective` is defined in terms of a `BasicSli`, replace the - // `BasicSli` with a `RequestBasedSli` spelling out how the SLI is computed. - ServiceLevelObjective.View view = 5; -} - -// The `ListServiceLevelObjectives` response. -message ListServiceLevelObjectivesResponse { - // The `ServiceLevelObjective`s matching the specified filter. - repeated ServiceLevelObjective service_level_objectives = 1; - - // If there are more results than have been returned, then this field is set - // to a non-empty value. To see the additional results, - // use that value as `pageToken` in the next call to this method. - string next_page_token = 2; -} - -// The `UpdateServiceLevelObjective` request. -message UpdateServiceLevelObjectiveRequest { - // The `ServiceLevelObjective` to draw updates from. - // The given `name` specifies the resource to update. - ServiceLevelObjective service_level_objective = 1; - - // A set of field paths defining which fields to use for the update. - google.protobuf.FieldMask update_mask = 2; -} - -// The `DeleteServiceLevelObjective` request. -message DeleteServiceLevelObjectiveRequest { - // Resource name of the `ServiceLevelObjective` to delete. - // Of the form - // `projects/{project_id}/services/{service_id}/serviceLevelObjectives/{slo_name}`. - string name = 1; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/service_service_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/service_service_pb2.py deleted file mode 100644 index 4cc7aaa4aaca..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/service_service_pb2.py +++ /dev/null @@ -1,1307 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring_v3/proto/service_service.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.cloud.monitoring_v3.proto import ( - service_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/service_service.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\035ServiceMonitoringServiceProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n6google/cloud/monitoring_v3/proto/service_service.proto\x12\x14google.monitoring.v3\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a.google/cloud/monitoring_v3/proto/service.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/api/client.proto"j\n\x14\x43reateServiceRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\nservice_id\x18\x03 \x01(\t\x12.\n\x07service\x18\x02 \x01(\x0b\x32\x1d.google.monitoring.v3.Service"!\n\x11GetServiceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x13ListServicesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"`\n\x14ListServicesResponse\x12/\n\x08services\x18\x01 \x03(\x0b\x32\x1d.google.monitoring.v3.Service\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"w\n\x14UpdateServiceRequest\x12.\n\x07service\x18\x01 \x01(\x0b\x32\x1d.google.monitoring.v3.Service\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"$\n\x14\x44\x65leteServiceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\xa6\x01\n"CreateServiceLevelObjectiveRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12"\n\x1aservice_level_objective_id\x18\x03 \x01(\t\x12L\n\x17service_level_objective\x18\x02 \x01(\x0b\x32+.google.monitoring.v3.ServiceLevelObjective"o\n\x1fGetServiceLevelObjectiveRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12>\n\x04view\x18\x02 \x01(\x0e\x32\x30.google.monitoring.v3.ServiceLevelObjective.View"\xaa\x01\n!ListServiceLevelObjectivesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\x12>\n\x04view\x18\x05 \x01(\x0e\x32\x30.google.monitoring.v3.ServiceLevelObjective.View"\x8c\x01\n"ListServiceLevelObjectivesResponse\x12M\n\x18service_level_objectives\x18\x01 \x03(\x0b\x32+.google.monitoring.v3.ServiceLevelObjective\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xa3\x01\n"UpdateServiceLevelObjectiveRequest\x12L\n\x17service_level_objective\x18\x01 
\x01(\x0b\x32+.google.monitoring.v3.ServiceLevelObjective\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"2\n"DeleteServiceLevelObjectiveRequest\x12\x0c\n\x04name\x18\x01 \x01(\t2\xe5\x0e\n\x18ServiceMonitoringService\x12\x86\x01\n\rCreateService\x12*.google.monitoring.v3.CreateServiceRequest\x1a\x1d.google.monitoring.v3.Service"*\x82\xd3\xe4\x93\x02$"\x19/v3/{parent=*/*}/services:\x07service\x12w\n\nGetService\x12\'.google.monitoring.v3.GetServiceRequest\x1a\x1d.google.monitoring.v3.Service"!\x82\xd3\xe4\x93\x02\x1b\x12\x19/v3/{name=*/*/services/*}\x12\x88\x01\n\x0cListServices\x12).google.monitoring.v3.ListServicesRequest\x1a*.google.monitoring.v3.ListServicesResponse"!\x82\xd3\xe4\x93\x02\x1b\x12\x19/v3/{parent=*/*}/services\x12\x8e\x01\n\rUpdateService\x12*.google.monitoring.v3.UpdateServiceRequest\x1a\x1d.google.monitoring.v3.Service"2\x82\xd3\xe4\x93\x02,2!/v3/{service.name=*/*/services/*}:\x07service\x12v\n\rDeleteService\x12*.google.monitoring.v3.DeleteServiceRequest\x1a\x16.google.protobuf.Empty"!\x82\xd3\xe4\x93\x02\x1b*\x19/v3/{name=*/*/services/*}\x12\xd9\x01\n\x1b\x43reateServiceLevelObjective\x12\x38.google.monitoring.v3.CreateServiceLevelObjectiveRequest\x1a+.google.monitoring.v3.ServiceLevelObjective"S\x82\xd3\xe4\x93\x02M"2/v3/{parent=*/*/services/*}/serviceLevelObjectives:\x17service_level_objective\x12\xba\x01\n\x18GetServiceLevelObjective\x12\x35.google.monitoring.v3.GetServiceLevelObjectiveRequest\x1a+.google.monitoring.v3.ServiceLevelObjective":\x82\xd3\xe4\x93\x02\x34\x12\x32/v3/{name=*/*/services/*/serviceLevelObjectives/*}\x12\xcb\x01\n\x1aListServiceLevelObjectives\x12\x37.google.monitoring.v3.ListServiceLevelObjectivesRequest\x1a\x38.google.monitoring.v3.ListServiceLevelObjectivesResponse":\x82\xd3\xe4\x93\x02\x34\x12\x32/v3/{parent=*/*/services/*}/serviceLevelObjectives\x12\xf1\x01\n\x1bUpdateServiceLevelObjective\x12\x38.google.monitoring.v3.UpdateServiceLevelObjectiveRequest\x1a+.google.monitoring.v3.ServiceLevelObjective"k\x82\xd3\xe4\x93\x02\x65\x32J/v3/{service_level_objective.name=*/*/services/*/serviceLevelObjectives/*}:\x17service_level_objective\x12\xab\x01\n\x1b\x44\x65leteServiceLevelObjective\x12\x38.google.monitoring.v3.DeleteServiceLevelObjectiveRequest\x1a\x16.google.protobuf.Empty":\x82\xd3\xe4\x93\x02\x34*2/v3/{name=*/*/services/*/serviceLevelObjectives/*}\x1a\xa9\x01\xca\x41\x19monitoring.googleapis.com\xd2\x41\x89\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.readB\xb5\x01\n\x18\x63om.google.monitoring.v3B\x1dServiceMonitoringServiceProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_CREATESERVICEREQUEST = _descriptor.Descriptor( - name="CreateServiceRequest", - full_name="google.monitoring.v3.CreateServiceRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.monitoring.v3.CreateServiceRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_id", - full_name="google.monitoring.v3.CreateServiceRequest.service_id", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service", - full_name="google.monitoring.v3.CreateServiceRequest.service", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=283, - serialized_end=389, -) - - -_GETSERVICEREQUEST = _descriptor.Descriptor( - name="GetServiceRequest", - full_name="google.monitoring.v3.GetServiceRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.GetServiceRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=391, - serialized_end=424, -) - - -_LISTSERVICESREQUEST = _descriptor.Descriptor( - name="ListServicesRequest", - full_name="google.monitoring.v3.ListServicesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.monitoring.v3.ListServicesRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.monitoring.v3.ListServicesRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.monitoring.v3.ListServicesRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.monitoring.v3.ListServicesRequest.page_token", - index=3, - number=4, - 
type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=426, - serialized_end=518, -) - - -_LISTSERVICESRESPONSE = _descriptor.Descriptor( - name="ListServicesResponse", - full_name="google.monitoring.v3.ListServicesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="services", - full_name="google.monitoring.v3.ListServicesResponse.services", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.monitoring.v3.ListServicesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=520, - serialized_end=616, -) - - -_UPDATESERVICEREQUEST = _descriptor.Descriptor( - name="UpdateServiceRequest", - full_name="google.monitoring.v3.UpdateServiceRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="service", - full_name="google.monitoring.v3.UpdateServiceRequest.service", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.monitoring.v3.UpdateServiceRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=618, - serialized_end=737, -) - - -_DELETESERVICEREQUEST = _descriptor.Descriptor( - name="DeleteServiceRequest", - full_name="google.monitoring.v3.DeleteServiceRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.DeleteServiceRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], 
- serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=739, - serialized_end=775, -) - - -_CREATESERVICELEVELOBJECTIVEREQUEST = _descriptor.Descriptor( - name="CreateServiceLevelObjectiveRequest", - full_name="google.monitoring.v3.CreateServiceLevelObjectiveRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.monitoring.v3.CreateServiceLevelObjectiveRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_level_objective_id", - full_name="google.monitoring.v3.CreateServiceLevelObjectiveRequest.service_level_objective_id", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_level_objective", - full_name="google.monitoring.v3.CreateServiceLevelObjectiveRequest.service_level_objective", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=778, - serialized_end=944, -) - - -_GETSERVICELEVELOBJECTIVEREQUEST = _descriptor.Descriptor( - name="GetServiceLevelObjectiveRequest", - full_name="google.monitoring.v3.GetServiceLevelObjectiveRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.GetServiceLevelObjectiveRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="view", - full_name="google.monitoring.v3.GetServiceLevelObjectiveRequest.view", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=946, - serialized_end=1057, -) - - -_LISTSERVICELEVELOBJECTIVESREQUEST = _descriptor.Descriptor( - name="ListServiceLevelObjectivesRequest", - full_name="google.monitoring.v3.ListServiceLevelObjectivesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.monitoring.v3.ListServiceLevelObjectivesRequest.parent", - index=0, - 
number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.monitoring.v3.ListServiceLevelObjectivesRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.monitoring.v3.ListServiceLevelObjectivesRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.monitoring.v3.ListServiceLevelObjectivesRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="view", - full_name="google.monitoring.v3.ListServiceLevelObjectivesRequest.view", - index=4, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1060, - serialized_end=1230, -) - - -_LISTSERVICELEVELOBJECTIVESRESPONSE = _descriptor.Descriptor( - name="ListServiceLevelObjectivesResponse", - full_name="google.monitoring.v3.ListServiceLevelObjectivesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="service_level_objectives", - full_name="google.monitoring.v3.ListServiceLevelObjectivesResponse.service_level_objectives", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.monitoring.v3.ListServiceLevelObjectivesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1233, - serialized_end=1373, -) - - -_UPDATESERVICELEVELOBJECTIVEREQUEST = _descriptor.Descriptor( - name="UpdateServiceLevelObjectiveRequest", - 
full_name="google.monitoring.v3.UpdateServiceLevelObjectiveRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="service_level_objective", - full_name="google.monitoring.v3.UpdateServiceLevelObjectiveRequest.service_level_objective", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.monitoring.v3.UpdateServiceLevelObjectiveRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1376, - serialized_end=1539, -) - - -_DELETESERVICELEVELOBJECTIVEREQUEST = _descriptor.Descriptor( - name="DeleteServiceLevelObjectiveRequest", - full_name="google.monitoring.v3.DeleteServiceLevelObjectiveRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.DeleteServiceLevelObjectiveRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1541, - serialized_end=1591, -) - -_CREATESERVICEREQUEST.fields_by_name[ - "service" -].message_type = google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2._SERVICE -_LISTSERVICESRESPONSE.fields_by_name[ - "services" -].message_type = google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2._SERVICE -_UPDATESERVICEREQUEST.fields_by_name[ - "service" -].message_type = google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2._SERVICE -_UPDATESERVICEREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_CREATESERVICELEVELOBJECTIVEREQUEST.fields_by_name[ - "service_level_objective" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2._SERVICELEVELOBJECTIVE -) -_GETSERVICELEVELOBJECTIVEREQUEST.fields_by_name[ - "view" -].enum_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2._SERVICELEVELOBJECTIVE_VIEW -) -_LISTSERVICELEVELOBJECTIVESREQUEST.fields_by_name[ - "view" -].enum_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2._SERVICELEVELOBJECTIVE_VIEW -) -_LISTSERVICELEVELOBJECTIVESRESPONSE.fields_by_name[ - "service_level_objectives" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2._SERVICELEVELOBJECTIVE -) -_UPDATESERVICELEVELOBJECTIVEREQUEST.fields_by_name[ - "service_level_objective" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2._SERVICELEVELOBJECTIVE -) 
-_UPDATESERVICELEVELOBJECTIVEREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -DESCRIPTOR.message_types_by_name["CreateServiceRequest"] = _CREATESERVICEREQUEST -DESCRIPTOR.message_types_by_name["GetServiceRequest"] = _GETSERVICEREQUEST -DESCRIPTOR.message_types_by_name["ListServicesRequest"] = _LISTSERVICESREQUEST -DESCRIPTOR.message_types_by_name["ListServicesResponse"] = _LISTSERVICESRESPONSE -DESCRIPTOR.message_types_by_name["UpdateServiceRequest"] = _UPDATESERVICEREQUEST -DESCRIPTOR.message_types_by_name["DeleteServiceRequest"] = _DELETESERVICEREQUEST -DESCRIPTOR.message_types_by_name[ - "CreateServiceLevelObjectiveRequest" -] = _CREATESERVICELEVELOBJECTIVEREQUEST -DESCRIPTOR.message_types_by_name[ - "GetServiceLevelObjectiveRequest" -] = _GETSERVICELEVELOBJECTIVEREQUEST -DESCRIPTOR.message_types_by_name[ - "ListServiceLevelObjectivesRequest" -] = _LISTSERVICELEVELOBJECTIVESREQUEST -DESCRIPTOR.message_types_by_name[ - "ListServiceLevelObjectivesResponse" -] = _LISTSERVICELEVELOBJECTIVESRESPONSE -DESCRIPTOR.message_types_by_name[ - "UpdateServiceLevelObjectiveRequest" -] = _UPDATESERVICELEVELOBJECTIVEREQUEST -DESCRIPTOR.message_types_by_name[ - "DeleteServiceLevelObjectiveRequest" -] = _DELETESERVICELEVELOBJECTIVEREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -CreateServiceRequest = _reflection.GeneratedProtocolMessageType( - "CreateServiceRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATESERVICEREQUEST, - __module__="google.cloud.monitoring_v3.proto.service_service_pb2", - __doc__="""The ``CreateService`` request. - - - Attributes: - parent: - Resource name of the parent workspace. Of the form - ``projects/{project_id}``. - service_id: - Optional. The Service id to use for this Service. If omitted, - an id will be generated instead. Must match the pattern - [a-z0-9-]+ - service: - The ``Service`` to create. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateServiceRequest) - ), -) -_sym_db.RegisterMessage(CreateServiceRequest) - -GetServiceRequest = _reflection.GeneratedProtocolMessageType( - "GetServiceRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETSERVICEREQUEST, - __module__="google.cloud.monitoring_v3.proto.service_service_pb2", - __doc__="""The ``GetService`` request. - - - Attributes: - name: - Resource name of the ``Service``. Of the form - ``projects/{project_id}/services/{service_id}``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.GetServiceRequest) - ), -) -_sym_db.RegisterMessage(GetServiceRequest) - -ListServicesRequest = _reflection.GeneratedProtocolMessageType( - "ListServicesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTSERVICESREQUEST, - __module__="google.cloud.monitoring_v3.proto.service_service_pb2", - __doc__="""The ``ListServices`` request. - - - Attributes: - parent: - Resource name of the parent ``Workspace``. Of the form - ``projects/{project_id}``. - filter: - A filter specifying what ``Service``\ s to return. The filter - currently supports the following fields: :: - - `identifier_case` - `app_engine.module_id` - - `cloud_endpoints.service` - `cluster_istio.location` - - `cluster_istio.cluster_name` - - `cluster_istio.service_namespace` - - `cluster_istio.service_name` ``identifier_case`` refers to - which option in the identifier oneof is populated. For - example, the filter ``identifier_case = "CUSTOM"`` would match - all services with a value for the ``custom`` field. 
Valid - options are "CUSTOM", "APP\_ENGINE", "CLOUD\_ENDPOINTS", and - "CLUSTER\_ISTIO". - page_size: - A non-negative number that is the maximum number of results to - return. When 0, use default page size. - page_token: - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return - additional results from the previous method call. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListServicesRequest) - ), -) -_sym_db.RegisterMessage(ListServicesRequest) - -ListServicesResponse = _reflection.GeneratedProtocolMessageType( - "ListServicesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTSERVICESRESPONSE, - __module__="google.cloud.monitoring_v3.proto.service_service_pb2", - __doc__="""The ``ListServices`` response. - - - Attributes: - services: - The ``Service``\ s matching the specified filter. - next_page_token: - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``pageToken`` in the next call to - this method. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListServicesResponse) - ), -) -_sym_db.RegisterMessage(ListServicesResponse) - -UpdateServiceRequest = _reflection.GeneratedProtocolMessageType( - "UpdateServiceRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATESERVICEREQUEST, - __module__="google.cloud.monitoring_v3.proto.service_service_pb2", - __doc__="""The ``UpdateService`` request. - - - Attributes: - service: - The ``Service`` to draw updates from. The given ``name`` - specifies the resource to update. - update_mask: - A set of field paths defining which fields to use for the - update. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.UpdateServiceRequest) - ), -) -_sym_db.RegisterMessage(UpdateServiceRequest) - -DeleteServiceRequest = _reflection.GeneratedProtocolMessageType( - "DeleteServiceRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETESERVICEREQUEST, - __module__="google.cloud.monitoring_v3.proto.service_service_pb2", - __doc__="""The ``DeleteService`` request. - - - Attributes: - name: - Resource name of the ``Service`` to delete. Of the form - ``projects/{project_id}/service/{service_id}``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.DeleteServiceRequest) - ), -) -_sym_db.RegisterMessage(DeleteServiceRequest) - -CreateServiceLevelObjectiveRequest = _reflection.GeneratedProtocolMessageType( - "CreateServiceLevelObjectiveRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATESERVICELEVELOBJECTIVEREQUEST, - __module__="google.cloud.monitoring_v3.proto.service_service_pb2", - __doc__="""The ``CreateServiceLevelObjective`` request. - - - Attributes: - parent: - Resource name of the parent ``Service``. Of the form - ``projects/{project_id}/services/{service_id}``. - service_level_objective_id: - Optional. The ServiceLevelObjective id to use for this - ServiceLevelObjective. If omitted, an id will be generated - instead. Must match the pattern [a-z0-9-]+ - service_level_objective: - The ``ServiceLevelObjective`` to create. The provided ``name`` - will be respected if no ``ServiceLevelObjective`` exists with - this name. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateServiceLevelObjectiveRequest) - ), -) -_sym_db.RegisterMessage(CreateServiceLevelObjectiveRequest) - -GetServiceLevelObjectiveRequest = _reflection.GeneratedProtocolMessageType( - "GetServiceLevelObjectiveRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETSERVICELEVELOBJECTIVEREQUEST, - __module__="google.cloud.monitoring_v3.proto.service_service_pb2", - __doc__="""The ``GetServiceLevelObjective`` request. - - - Attributes: - name: - Resource name of the ``ServiceLevelObjective`` to get. Of the - form ``projects/{project_id}/services/{service_id}/serviceLeve - lObjectives/{slo_name}``. - view: - View of the ``ServiceLevelObjective`` to return. If - ``DEFAULT``, return the ``ServiceLevelObjective`` as - originally defined. If ``EXPLICIT`` and the - ``ServiceLevelObjective`` is defined in terms of a - ``BasicSli``, replace the ``BasicSli`` with a - ``RequestBasedSli`` spelling out how the SLI is computed. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.GetServiceLevelObjectiveRequest) - ), -) -_sym_db.RegisterMessage(GetServiceLevelObjectiveRequest) - -ListServiceLevelObjectivesRequest = _reflection.GeneratedProtocolMessageType( - "ListServiceLevelObjectivesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTSERVICELEVELOBJECTIVESREQUEST, - __module__="google.cloud.monitoring_v3.proto.service_service_pb2", - __doc__="""The ``ListServiceLevelObjectives`` request. - - - Attributes: - parent: - Resource name of the parent ``Service``. Of the form - ``projects/{project_id}/services/{service_id}``. - filter: - A filter specifying what ``ServiceLevelObjective``\ s to - return. - page_size: - A non-negative number that is the maximum number of results to - return. When 0, use default page size. - page_token: - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return - additional results from the previous method call. - view: - View of the ``ServiceLevelObjective``\ s to return. If - ``DEFAULT``, return each ``ServiceLevelObjective`` as - originally defined. If ``EXPLICIT`` and the - ``ServiceLevelObjective`` is defined in terms of a - ``BasicSli``, replace the ``BasicSli`` with a - ``RequestBasedSli`` spelling out how the SLI is computed. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListServiceLevelObjectivesRequest) - ), -) -_sym_db.RegisterMessage(ListServiceLevelObjectivesRequest) - -ListServiceLevelObjectivesResponse = _reflection.GeneratedProtocolMessageType( - "ListServiceLevelObjectivesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTSERVICELEVELOBJECTIVESRESPONSE, - __module__="google.cloud.monitoring_v3.proto.service_service_pb2", - __doc__="""The ``ListServiceLevelObjectives`` response. - - - Attributes: - service_level_objectives: - The ``ServiceLevelObjective``\ s matching the specified - filter. - next_page_token: - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``pageToken`` in the next call to - this method. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListServiceLevelObjectivesResponse) - ), -) -_sym_db.RegisterMessage(ListServiceLevelObjectivesResponse) - -UpdateServiceLevelObjectiveRequest = _reflection.GeneratedProtocolMessageType( - "UpdateServiceLevelObjectiveRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATESERVICELEVELOBJECTIVEREQUEST, - __module__="google.cloud.monitoring_v3.proto.service_service_pb2", - __doc__="""The ``UpdateServiceLevelObjective`` request. - - - Attributes: - service_level_objective: - The ``ServiceLevelObjective`` to draw updates from. The given - ``name`` specifies the resource to update. - update_mask: - A set of field paths defining which fields to use for the - update. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.UpdateServiceLevelObjectiveRequest) - ), -) -_sym_db.RegisterMessage(UpdateServiceLevelObjectiveRequest) - -DeleteServiceLevelObjectiveRequest = _reflection.GeneratedProtocolMessageType( - "DeleteServiceLevelObjectiveRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETESERVICELEVELOBJECTIVEREQUEST, - __module__="google.cloud.monitoring_v3.proto.service_service_pb2", - __doc__="""The ``DeleteServiceLevelObjective`` request. - - - Attributes: - name: - Resource name of the ``ServiceLevelObjective`` to delete. Of - the form ``projects/{project_id}/services/{service_id}/service - LevelObjectives/{slo_name}``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.DeleteServiceLevelObjectiveRequest) - ), -) -_sym_db.RegisterMessage(DeleteServiceLevelObjectiveRequest) - - -DESCRIPTOR._options = None - -_SERVICEMONITORINGSERVICE = _descriptor.ServiceDescriptor( - name="ServiceMonitoringService", - full_name="google.monitoring.v3.ServiceMonitoringService", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\031monitoring.googleapis.com\322A\211\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.read" - ), - serialized_start=1594, - serialized_end=3487, - methods=[ - _descriptor.MethodDescriptor( - name="CreateService", - full_name="google.monitoring.v3.ServiceMonitoringService.CreateService", - index=0, - containing_service=None, - input_type=_CREATESERVICEREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2._SERVICE, - serialized_options=_b( - '\202\323\344\223\002$"\031/v3/{parent=*/*}/services:\007service' - ), - ), - _descriptor.MethodDescriptor( - name="GetService", - full_name="google.monitoring.v3.ServiceMonitoringService.GetService", - index=1, - containing_service=None, - input_type=_GETSERVICEREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2._SERVICE, - serialized_options=_b( - "\202\323\344\223\002\033\022\031/v3/{name=*/*/services/*}" - ), - ), - _descriptor.MethodDescriptor( - name="ListServices", - full_name="google.monitoring.v3.ServiceMonitoringService.ListServices", - index=2, - containing_service=None, - input_type=_LISTSERVICESREQUEST, - output_type=_LISTSERVICESRESPONSE, - serialized_options=_b( - "\202\323\344\223\002\033\022\031/v3/{parent=*/*}/services" - ), - ), - _descriptor.MethodDescriptor( - name="UpdateService", - full_name="google.monitoring.v3.ServiceMonitoringService.UpdateService", - index=3, - containing_service=None, - input_type=_UPDATESERVICEREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2._SERVICE, - serialized_options=_b( - 
"\202\323\344\223\002,2!/v3/{service.name=*/*/services/*}:\007service" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteService", - full_name="google.monitoring.v3.ServiceMonitoringService.DeleteService", - index=4, - containing_service=None, - input_type=_DELETESERVICEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002\033*\031/v3/{name=*/*/services/*}" - ), - ), - _descriptor.MethodDescriptor( - name="CreateServiceLevelObjective", - full_name="google.monitoring.v3.ServiceMonitoringService.CreateServiceLevelObjective", - index=5, - containing_service=None, - input_type=_CREATESERVICELEVELOBJECTIVEREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2._SERVICELEVELOBJECTIVE, - serialized_options=_b( - '\202\323\344\223\002M"2/v3/{parent=*/*/services/*}/serviceLevelObjectives:\027service_level_objective' - ), - ), - _descriptor.MethodDescriptor( - name="GetServiceLevelObjective", - full_name="google.monitoring.v3.ServiceMonitoringService.GetServiceLevelObjective", - index=6, - containing_service=None, - input_type=_GETSERVICELEVELOBJECTIVEREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2._SERVICELEVELOBJECTIVE, - serialized_options=_b( - "\202\323\344\223\0024\0222/v3/{name=*/*/services/*/serviceLevelObjectives/*}" - ), - ), - _descriptor.MethodDescriptor( - name="ListServiceLevelObjectives", - full_name="google.monitoring.v3.ServiceMonitoringService.ListServiceLevelObjectives", - index=7, - containing_service=None, - input_type=_LISTSERVICELEVELOBJECTIVESREQUEST, - output_type=_LISTSERVICELEVELOBJECTIVESRESPONSE, - serialized_options=_b( - "\202\323\344\223\0024\0222/v3/{parent=*/*/services/*}/serviceLevelObjectives" - ), - ), - _descriptor.MethodDescriptor( - name="UpdateServiceLevelObjective", - full_name="google.monitoring.v3.ServiceMonitoringService.UpdateServiceLevelObjective", - index=8, - containing_service=None, - input_type=_UPDATESERVICELEVELOBJECTIVEREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2._SERVICELEVELOBJECTIVE, - serialized_options=_b( - "\202\323\344\223\002e2J/v3/{service_level_objective.name=*/*/services/*/serviceLevelObjectives/*}:\027service_level_objective" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteServiceLevelObjective", - full_name="google.monitoring.v3.ServiceMonitoringService.DeleteServiceLevelObjective", - index=9, - containing_service=None, - input_type=_DELETESERVICELEVELOBJECTIVEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\0024*2/v3/{name=*/*/services/*/serviceLevelObjectives/*}" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_SERVICEMONITORINGSERVICE) - -DESCRIPTOR.services_by_name["ServiceMonitoringService"] = _SERVICEMONITORINGSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/service_service_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/service_service_pb2_grpc.py deleted file mode 100644 index 3c03dc9c3706..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/service_service_pb2_grpc.py +++ /dev/null @@ -1,212 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-import grpc - -from google.cloud.monitoring_v3.proto import ( - service_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2, -) -from google.cloud.monitoring_v3.proto import ( - service_service_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class ServiceMonitoringServiceStub(object): - """The Stackdriver Monitoring Service-Oriented Monitoring API has endpoints for - managing and querying aspects of a workspace's services. These include the - `Service`'s monitored resources, its Service-Level Objectives, and a taxonomy - of categorized Health Metrics. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.CreateService = channel.unary_unary( - "/google.monitoring.v3.ServiceMonitoringService/CreateService", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.CreateServiceRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2.Service.FromString, - ) - self.GetService = channel.unary_unary( - "/google.monitoring.v3.ServiceMonitoringService/GetService", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.GetServiceRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2.Service.FromString, - ) - self.ListServices = channel.unary_unary( - "/google.monitoring.v3.ServiceMonitoringService/ListServices", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.ListServicesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.ListServicesResponse.FromString, - ) - self.UpdateService = channel.unary_unary( - "/google.monitoring.v3.ServiceMonitoringService/UpdateService", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.UpdateServiceRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2.Service.FromString, - ) - self.DeleteService = channel.unary_unary( - "/google.monitoring.v3.ServiceMonitoringService/DeleteService", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.DeleteServiceRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.CreateServiceLevelObjective = channel.unary_unary( - "/google.monitoring.v3.ServiceMonitoringService/CreateServiceLevelObjective", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.CreateServiceLevelObjectiveRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2.ServiceLevelObjective.FromString, - ) - self.GetServiceLevelObjective = channel.unary_unary( - "/google.monitoring.v3.ServiceMonitoringService/GetServiceLevelObjective", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.GetServiceLevelObjectiveRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2.ServiceLevelObjective.FromString, - ) - self.ListServiceLevelObjectives = channel.unary_unary( - "/google.monitoring.v3.ServiceMonitoringService/ListServiceLevelObjectives", - 
request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.ListServiceLevelObjectivesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.ListServiceLevelObjectivesResponse.FromString, - ) - self.UpdateServiceLevelObjective = channel.unary_unary( - "/google.monitoring.v3.ServiceMonitoringService/UpdateServiceLevelObjective", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.UpdateServiceLevelObjectiveRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2.ServiceLevelObjective.FromString, - ) - self.DeleteServiceLevelObjective = channel.unary_unary( - "/google.monitoring.v3.ServiceMonitoringService/DeleteServiceLevelObjective", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.DeleteServiceLevelObjectiveRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - -class ServiceMonitoringServiceServicer(object): - """The Stackdriver Monitoring Service-Oriented Monitoring API has endpoints for - managing and querying aspects of a workspace's services. These include the - `Service`'s monitored resources, its Service-Level Objectives, and a taxonomy - of categorized Health Metrics. - """ - - def CreateService(self, request, context): - """Create a `Service`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetService(self, request, context): - """Get the named `Service`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListServices(self, request, context): - """List `Service`s for this workspace. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateService(self, request, context): - """Update this `Service`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteService(self, request, context): - """Soft delete this `Service`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateServiceLevelObjective(self, request, context): - """Create a `ServiceLevelObjective` for the given `Service`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetServiceLevelObjective(self, request, context): - """Get a `ServiceLevelObjective` by name. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListServiceLevelObjectives(self, request, context): - """List the `ServiceLevelObjective`s for the given `Service`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateServiceLevelObjective(self, request, context): - """Update the given `ServiceLevelObjective`. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteServiceLevelObjective(self, request, context): - """Delete the given `ServiceLevelObjective`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_ServiceMonitoringServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "CreateService": grpc.unary_unary_rpc_method_handler( - servicer.CreateService, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.CreateServiceRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2.Service.SerializeToString, - ), - "GetService": grpc.unary_unary_rpc_method_handler( - servicer.GetService, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.GetServiceRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2.Service.SerializeToString, - ), - "ListServices": grpc.unary_unary_rpc_method_handler( - servicer.ListServices, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.ListServicesRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.ListServicesResponse.SerializeToString, - ), - "UpdateService": grpc.unary_unary_rpc_method_handler( - servicer.UpdateService, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.UpdateServiceRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2.Service.SerializeToString, - ), - "DeleteService": grpc.unary_unary_rpc_method_handler( - servicer.DeleteService, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.DeleteServiceRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "CreateServiceLevelObjective": grpc.unary_unary_rpc_method_handler( - servicer.CreateServiceLevelObjective, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.CreateServiceLevelObjectiveRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2.ServiceLevelObjective.SerializeToString, - ), - "GetServiceLevelObjective": grpc.unary_unary_rpc_method_handler( - servicer.GetServiceLevelObjective, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.GetServiceLevelObjectiveRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2.ServiceLevelObjective.SerializeToString, - ), - "ListServiceLevelObjectives": grpc.unary_unary_rpc_method_handler( - servicer.ListServiceLevelObjectives, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.ListServiceLevelObjectivesRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.ListServiceLevelObjectivesResponse.SerializeToString, - ), - "UpdateServiceLevelObjective": grpc.unary_unary_rpc_method_handler( - servicer.UpdateServiceLevelObjective, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.UpdateServiceLevelObjectiveRequest.FromString, 
- response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__pb2.ServiceLevelObjective.SerializeToString, - ), - "DeleteServiceLevelObjective": grpc.unary_unary_rpc_method_handler( - servicer.DeleteServiceLevelObjective, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_service__service__pb2.DeleteServiceLevelObjectiveRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.monitoring.v3.ServiceMonitoringService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/monitoring/google/cloud/monitoring_v3/proto/span_context.proto b/monitoring/google/cloud/monitoring_v3/proto/span_context.proto deleted file mode 100644 index cbcb8f72e878..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/span_context.proto +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.monitoring.v3; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "SpanContextProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// The context of a span, attached to google.api.Distribution.Exemplars -// in google.api.Distribution values during aggregation. -// -// It contains the name of a span with format: -// projects/[PROJECT_ID]/traces/[TRACE_ID]/spans/[SPAN_ID] -message SpanContext { - // The resource name of the span in the following format: - // - // projects/[PROJECT_ID]/traces/[TRACE_ID]/spans/[SPAN_ID] - // - // [TRACE_ID] is a unique identifier for a trace within a project; - // it is a 32-character hexadecimal encoding of a 16-byte array. - // - // [SPAN_ID] is a unique identifier for a span within a trace; it - // is a 16-character hexadecimal encoding of an 8-byte array. - string span_name = 1; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/span_context_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/span_context_pb2.py deleted file mode 100644 index d0a53f5f4a59..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/span_context_pb2.py +++ /dev/null @@ -1,102 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
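The service_service_pb2_grpc module deleted above pairs the client-side `ServiceMonitoringServiceStub` with the server-side `ServiceMonitoringServiceServicer` and the `add_ServiceMonitoringServiceServicer_to_server` registration helper. A minimal plumbing sketch over a local insecure channel, not part of the deleted sources (`EchoServicer`, "my-project", and the "demo" service name are placeholders; calls against the real monitoring.googleapis.com endpoint additionally need OAuth credentials, which the GAPIC transport layer normally attaches):

```python
# Sketch only: wire the deleted stub/servicer helpers together locally.
from concurrent import futures

import grpc
from google.cloud.monitoring_v3.proto import (
    service_pb2,
    service_service_pb2,
    service_service_pb2_grpc,
)


class EchoServicer(service_service_pb2_grpc.ServiceMonitoringServiceServicer):
    def ListServices(self, request, context):
        # Return a single placeholder Service for demonstration purposes.
        return service_service_pb2.ListServicesResponse(
            services=[service_pb2.Service(name=request.parent + "/services/demo")]
        )


server = grpc.server(futures.ThreadPoolExecutor(max_workers=2))
service_service_pb2_grpc.add_ServiceMonitoringServiceServicer_to_server(
    EchoServicer(), server
)
port = server.add_insecure_port("localhost:0")
server.start()

channel = grpc.insecure_channel(f"localhost:{port}")
stub = service_service_pb2_grpc.ServiceMonitoringServiceStub(channel)
response = stub.ListServices(
    service_service_pb2.ListServicesRequest(parent="projects/my-project")
)
print(response.services[0].name)  # projects/my-project/services/demo

server.stop(grace=None)
```

The stub methods map one-to-one onto the HTTP rules encoded in the service descriptor above; the higher-level GAPIC client wraps this same stub with retries, timeouts, and credentials.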
-# source: google/cloud/monitoring_v3/proto/span_context.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/span_context.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\020SpanContextProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n3google/cloud/monitoring_v3/proto/span_context.proto\x12\x14google.monitoring.v3" \n\x0bSpanContext\x12\x11\n\tspan_name\x18\x01 \x01(\tB\xa8\x01\n\x18\x63om.google.monitoring.v3B\x10SpanContextProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), -) - - -_SPANCONTEXT = _descriptor.Descriptor( - name="SpanContext", - full_name="google.monitoring.v3.SpanContext", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="span_name", - full_name="google.monitoring.v3.SpanContext.span_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=77, - serialized_end=109, -) - -DESCRIPTOR.message_types_by_name["SpanContext"] = _SPANCONTEXT -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -SpanContext = _reflection.GeneratedProtocolMessageType( - "SpanContext", - (_message.Message,), - dict( - DESCRIPTOR=_SPANCONTEXT, - __module__="google.cloud.monitoring_v3.proto.span_context_pb2", - __doc__="""The context of a span, attached to google.api.Distribution.Exemplars in - google.api.Distribution values during aggregation. - - It contains the name of a span with format: - projects/[PROJECT\_ID]/traces/[TRACE\_ID]/spans/[SPAN\_ID] - - - Attributes: - span_name: - The resource name of the span in the following format: :: - projects/[PROJECT_ID]/traces/[TRACE_ID]/spans/[SPAN_ID] - [TRACE\_ID] is a unique identifier for a trace within a - project; it is a 32-character hexadecimal encoding of a - 16-byte array. [SPAN\_ID] is a unique identifier for a span - within a trace; it is a 16-character hexadecimal encoding of - an 8-byte array. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.SpanContext) - ), -) -_sym_db.RegisterMessage(SpanContext) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/span_context_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/span_context_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/span_context_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/monitoring/google/cloud/monitoring_v3/proto/uptime.proto b/monitoring/google/cloud/monitoring_v3/proto/uptime.proto deleted file mode 100644 index 780a2578dff3..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/uptime.proto +++ /dev/null @@ -1,341 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.monitoring.v3; - -import "google/api/monitored_resource.proto"; -import "google/protobuf/duration.proto"; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "UptimeProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// An internal checker allows Uptime checks to run on private/internal GCP -// resources. -message InternalChecker { - option deprecated = true; - - // Operational states for an internal checker. - enum State { - // An internal checker should never be in the unspecified state. - UNSPECIFIED = 0; - - // The checker is being created, provisioned, and configured. A checker in - // this state can be returned by `ListInternalCheckers` or - // `GetInternalChecker`, as well as by examining the [long running - // Operation](https://cloud.google.com/apis/design/design_patterns#long_running_operations) - // that created it. - CREATING = 1; - - // The checker is running and available for use. A checker in this state - // can be returned by `ListInternalCheckers` or `GetInternalChecker` as - // well as by examining the [long running - // Operation](https://cloud.google.com/apis/design/design_patterns#long_running_operations) - // that created it. - // If a checker is being torn down, it is neither visible nor usable, so - // there is no "deleting" or "down" state. - RUNNING = 2; - } - - // A unique resource name for this InternalChecker. The format is: - // - // `projects/[PROJECT_ID]/internalCheckers/[INTERNAL_CHECKER_ID]`. - // - // `[PROJECT_ID]` is the Stackdriver Workspace project for the - // Uptime check config associated with the internal checker. - string name = 1; - - // The checker's human-readable name. The display name - // should be unique within a Stackdriver Workspace in order to make it easier - // to identify; however, uniqueness is not enforced. 
- string display_name = 2; - - // The [GCP VPC network](https://cloud.google.com/vpc/docs/vpc) where the - // internal resource lives (ex: "default"). - string network = 3; - - // The GCP zone the Uptime check should egress from. Only respected for - // internal Uptime checks, where internal_network is specified. - string gcp_zone = 4; - - // The GCP project ID where the internal checker lives. Not necessary - // the same as the Workspace project. - string peer_project_id = 6; - - // The current operational state of the internal checker. - State state = 7; -} - -// This message configures which resources and services to monitor for -// availability. -message UptimeCheckConfig { - // The resource submessage for group checks. It can be used instead of a - // monitored resource, when multiple resources are being monitored. - message ResourceGroup { - // The group of resources being monitored. Should be only the `[GROUP_ID]`, - // and not the full-path `projects/[PROJECT_ID]/groups/[GROUP_ID]`. - string group_id = 1; - - // The resource type of the group members. - GroupResourceType resource_type = 2; - } - - // Information involved in an HTTP/HTTPS Uptime check request. - message HttpCheck { - // The authentication parameters to provide to the specified resource or - // URL that requires a username and password. Currently, only - // [Basic HTTP authentication](https://tools.ietf.org/html/rfc7617) is - // supported in Uptime checks. - message BasicAuthentication { - // The username to use when authenticating with the HTTP server. - string username = 1; - - // The password to use when authenticating with the HTTP server. - string password = 2; - } - - // If `true`, use HTTPS instead of HTTP to run the check. - bool use_ssl = 1; - - // Optional (defaults to "/"). The path to the page against which to run - // the check. Will be combined with the `host` (specified within the - // `monitored_resource`) and `port` to construct the full URL. If the - // provided path does not begin with "/", a "/" will be prepended - // automatically. - string path = 2; - - // Optional (defaults to 80 when `use_ssl` is `false`, and 443 when - // `use_ssl` is `true`). The TCP port on the HTTP server against which to - // run the check. Will be combined with host (specified within the - // `monitored_resource`) and `path` to construct the full URL. - int32 port = 3; - - // The authentication information. Optional when creating an HTTP check; - // defaults to empty. - BasicAuthentication auth_info = 4; - - // Boolean specifiying whether to encrypt the header information. - // Encryption should be specified for any headers related to authentication - // that you do not wish to be seen when retrieving the configuration. The - // server will be responsible for encrypting the headers. - // On Get/List calls, if `mask_headers` is set to `true` then the headers - // will be obscured with `******.` - bool mask_headers = 5; - - // The list of headers to send as part of the Uptime check request. - // If two headers have the same key and different values, they should - // be entered as a single header, with the value being a comma-separated - // list of all the desired values as described at - // https://www.w3.org/Protocols/rfc2616/rfc2616.txt (page 31). - // Entering two separate headers with the same key in a Create call will - // cause the first to be overwritten by the second. - // The maximum number of headers allowed is 100. 
- map headers = 6; - - // Boolean specifying whether to include SSL certificate validation as a - // part of the Uptime check. Only applies to checks where - // `monitored_resource` is set to `uptime_url`. If `use_ssl` is `false`, - // setting `validate_ssl` to `true` has no effect. - bool validate_ssl = 7; - } - - // Information required for a TCP Uptime check request. - message TcpCheck { - // The TCP port on the server against which to run the check. Will be - // combined with host (specified within the `monitored_resource`) to - // construct the full URL. Required. - int32 port = 1; - } - - // Optional. Used to perform content matching. This allows matching based on - // substrings and regular expressions, together with their negations. Only the - // first 4 MB of an HTTP or HTTPS check's response (and the first - // 1 MB of a TCP check's response) are examined for purposes of content - // matching. - message ContentMatcher { - // Options to perform content matching. - enum ContentMatcherOption { - // No content matcher type specified (maintained for backward - // compatibility, but deprecated for future use). - // Treated as `CONTAINS_STRING`. - CONTENT_MATCHER_OPTION_UNSPECIFIED = 0; - - // Selects substring matching. The match succeeds if the output contains - // the `content` string. This is the default value for checks without - // a `matcher` option, or where the value of `matcher` is - // `CONTENT_MATCHER_OPTION_UNSPECIFIED`. - CONTAINS_STRING = 1; - - // Selects negation of substring matching. The match succeeds if the - // output does _NOT_ contain the `content` string. - NOT_CONTAINS_STRING = 2; - - // Selects regular-expression matching. The match succeeds of the output - // matches the regular expression specified in the `content` string. - MATCHES_REGEX = 3; - - // Selects negation of regular-expression matching. The match succeeds if - // the output does _NOT_ match the regular expression specified in the - // `content` string. - NOT_MATCHES_REGEX = 4; - } - - // String or regex content to match. Maximum 1024 bytes. An empty `content` - // string indicates no content matching is to be performed. - string content = 1; - - // The type of content matcher that will be applied to the server output, - // compared to the `content` string when the check is run. - ContentMatcherOption matcher = 2; - } - - // A unique resource name for this Uptime check configuration. The format is: - // - // `projects/[PROJECT_ID]/uptimeCheckConfigs/[UPTIME_CHECK_ID]`. - // - // This field should be omitted when creating the Uptime check configuration; - // on create, the resource name is assigned by the server and included in the - // response. - string name = 1; - - // A human-friendly name for the Uptime check configuration. The display name - // should be unique within a Stackdriver Workspace in order to make it easier - // to identify; however, uniqueness is not enforced. Required. - string display_name = 2; - - // The resource the check is checking. Required. - oneof resource { - // The [monitored - // resource](https://cloud.google.com/monitoring/api/resources) associated - // with the configuration. - // The following monitored resource types are supported for Uptime checks: - // `uptime_url`, - // `gce_instance`, - // `gae_app`, - // `aws_ec2_instance`, - // `aws_elb_load_balancer` - google.api.MonitoredResource monitored_resource = 3; - - // The group resource associated with the configuration. - ResourceGroup resource_group = 4; - } - - // The type of Uptime check request. 
- oneof check_request_type { - // Contains information needed to make an HTTP or HTTPS check. - HttpCheck http_check = 5; - - // Contains information needed to make a TCP check. - TcpCheck tcp_check = 6; - } - - // How often, in seconds, the Uptime check is performed. - // Currently, the only supported values are `60s` (1 minute), `300s` - // (5 minutes), `600s` (10 minutes), and `900s` (15 minutes). Optional, - // defaults to `60s`. - google.protobuf.Duration period = 7; - - // The maximum amount of time to wait for the request to complete (must be - // between 1 and 60 seconds). Required. - google.protobuf.Duration timeout = 8; - - // The content that is expected to appear in the data returned by the target - // server against which the check is run. Currently, only the first entry - // in the `content_matchers` list is supported, and additional entries will - // be ignored. This field is optional and should only be specified if a - // content match is required as part of the/ Uptime check. - repeated ContentMatcher content_matchers = 9; - - // The list of regions from which the check will be run. - // Some regions contain one location, and others contain more than one. - // If this field is specified, enough regions must be provided to include a - // minimum of 3 locations. Not specifying this field will result in Uptime - // checks running from all available regions. - repeated UptimeCheckRegion selected_regions = 10; - - // If this is `true`, then checks are made only from the 'internal_checkers'. - // If it is `false`, then checks are made only from the 'selected_regions'. - // It is an error to provide 'selected_regions' when is_internal is `true`, - // or to provide 'internal_checkers' when is_internal is `false`. - bool is_internal = 15 [deprecated = true]; - - // The internal checkers that this check will egress from. If `is_internal` is - // `true` and this list is empty, the check will egress from all the - // InternalCheckers configured for the project that owns this - // `UptimeCheckConfig`. - repeated InternalChecker internal_checkers = 14 [deprecated = true]; -} - -// The regions from which an Uptime check can be run. -enum UptimeCheckRegion { - // Default value if no region is specified. Will result in Uptime checks - // running from all regions. - REGION_UNSPECIFIED = 0; - - // Allows checks to run from locations within the United States of America. - USA = 1; - - // Allows checks to run from locations within the continent of Europe. - EUROPE = 2; - - // Allows checks to run from locations within the continent of South - // America. - SOUTH_AMERICA = 3; - - // Allows checks to run from locations within the Asia Pacific area (ex: - // Singapore). - ASIA_PACIFIC = 4; -} - -// Contains the region, location, and list of IP -// addresses where checkers in the location run from. -message UptimeCheckIp { - // A broad region category in which the IP address is located. - UptimeCheckRegion region = 1; - - // A more specific location within the region that typically encodes - // a particular city/town/metro (and its containing state/province or country) - // within the broader umbrella region category. - string location = 2; - - // The IP address from which the Uptime check originates. This is a fully - // specified IP address (not an IP address range). 
Most IP addresses, as of - // this publication, are in IPv4 format; however, one should not rely on the - // IP addresses being in IPv4 format indefinitely, and should support - // interpreting this field in either IPv4 or IPv6 format. - string ip_address = 3; -} - -// The supported resource types that can be used as values of -// `group_resource.resource_type`. -// `INSTANCE` includes `gce_instance` and `aws_ec2_instance` resource types. -// The resource types `gae_app` and `uptime_url` are not valid here because -// group checks on App Engine modules and URLs are not allowed. -enum GroupResourceType { - // Default value (not valid). - RESOURCE_TYPE_UNSPECIFIED = 0; - - // A group of instances from Google Cloud Platform (GCP) or - // Amazon Web Services (AWS). - INSTANCE = 1; - - // A group of Amazon ELB load balancers. - AWS_ELB_LOAD_BALANCER = 2; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/uptime_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/uptime_pb2.py deleted file mode 100644 index 2708316daf18..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/uptime_pb2.py +++ /dev/null @@ -1,1450 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring_v3/proto/uptime.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/uptime.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\013UptimeProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n-google/cloud/monitoring_v3/proto/uptime.proto\x12\x14google.monitoring.v3\x1a#google/api/monitored_resource.proto\x1a\x1egoogle/protobuf/duration.proto"\xe6\x01\n\x0fInternalChecker\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x0f\n\x07network\x18\x03 \x01(\t\x12\x10\n\x08gcp_zone\x18\x04 \x01(\t\x12\x17\n\x0fpeer_project_id\x18\x06 \x01(\t\x12:\n\x05state\x18\x07 \x01(\x0e\x32+.google.monitoring.v3.InternalChecker.State"3\n\x05State\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02:\x02\x18\x01"\xd7\x0b\n\x11UptimeCheckConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12;\n\x12monitored_resource\x18\x03 \x01(\x0b\x32\x1d.google.api.MonitoredResourceH\x00\x12O\n\x0eresource_group\x18\x04 \x01(\x0b\x32\x35.google.monitoring.v3.UptimeCheckConfig.ResourceGroupH\x00\x12G\n\nhttp_check\x18\x05 \x01(\x0b\x32\x31.google.monitoring.v3.UptimeCheckConfig.HttpCheckH\x01\x12\x45\n\ttcp_check\x18\x06 \x01(\x0b\x32\x30.google.monitoring.v3.UptimeCheckConfig.TcpCheckH\x01\x12)\n\x06period\x18\x07 \x01(\x0b\x32\x19.google.protobuf.Duration\x12*\n\x07timeout\x18\x08 
\x01(\x0b\x32\x19.google.protobuf.Duration\x12P\n\x10\x63ontent_matchers\x18\t \x03(\x0b\x32\x36.google.monitoring.v3.UptimeCheckConfig.ContentMatcher\x12\x41\n\x10selected_regions\x18\n \x03(\x0e\x32\'.google.monitoring.v3.UptimeCheckRegion\x12\x17\n\x0bis_internal\x18\x0f \x01(\x08\x42\x02\x18\x01\x12\x44\n\x11internal_checkers\x18\x0e \x03(\x0b\x32%.google.monitoring.v3.InternalCheckerB\x02\x18\x01\x1a\x61\n\rResourceGroup\x12\x10\n\x08group_id\x18\x01 \x01(\t\x12>\n\rresource_type\x18\x02 \x01(\x0e\x32\'.google.monitoring.v3.GroupResourceType\x1a\xfa\x02\n\tHttpCheck\x12\x0f\n\x07use_ssl\x18\x01 \x01(\x08\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\x05\x12X\n\tauth_info\x18\x04 \x01(\x0b\x32\x45.google.monitoring.v3.UptimeCheckConfig.HttpCheck.BasicAuthentication\x12\x14\n\x0cmask_headers\x18\x05 \x01(\x08\x12O\n\x07headers\x18\x06 \x03(\x0b\x32>.google.monitoring.v3.UptimeCheckConfig.HttpCheck.HeadersEntry\x12\x14\n\x0cvalidate_ssl\x18\x07 \x01(\x08\x1a\x39\n\x13\x42\x61sicAuthentication\x12\x10\n\x08username\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x18\n\x08TcpCheck\x12\x0c\n\x04port\x18\x01 \x01(\x05\x1a\x98\x02\n\x0e\x43ontentMatcher\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\\\n\x07matcher\x18\x02 \x01(\x0e\x32K.google.monitoring.v3.UptimeCheckConfig.ContentMatcher.ContentMatcherOption"\x96\x01\n\x14\x43ontentMatcherOption\x12&\n"CONTENT_MATCHER_OPTION_UNSPECIFIED\x10\x00\x12\x13\n\x0f\x43ONTAINS_STRING\x10\x01\x12\x17\n\x13NOT_CONTAINS_STRING\x10\x02\x12\x11\n\rMATCHES_REGEX\x10\x03\x12\x15\n\x11NOT_MATCHES_REGEX\x10\x04\x42\n\n\x08resourceB\x14\n\x12\x63heck_request_type"n\n\rUptimeCheckIp\x12\x37\n\x06region\x18\x01 \x01(\x0e\x32\'.google.monitoring.v3.UptimeCheckRegion\x12\x10\n\x08location\x18\x02 \x01(\t\x12\x12\n\nip_address\x18\x03 \x01(\t*e\n\x11UptimeCheckRegion\x12\x16\n\x12REGION_UNSPECIFIED\x10\x00\x12\x07\n\x03USA\x10\x01\x12\n\n\x06\x45UROPE\x10\x02\x12\x11\n\rSOUTH_AMERICA\x10\x03\x12\x10\n\x0c\x41SIA_PACIFIC\x10\x04*[\n\x11GroupResourceType\x12\x1d\n\x19RESOURCE_TYPE_UNSPECIFIED\x10\x00\x12\x0c\n\x08INSTANCE\x10\x01\x12\x19\n\x15\x41WS_ELB_LOAD_BALANCER\x10\x02\x42\xa3\x01\n\x18\x63om.google.monitoring.v3B\x0bUptimeProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - ], -) - -_UPTIMECHECKREGION = _descriptor.EnumDescriptor( - name="UptimeCheckRegion", - full_name="google.monitoring.v3.UptimeCheckRegion", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="REGION_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="USA", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="EUROPE", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SOUTH_AMERICA", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ASIA_PACIFIC", index=4, number=4, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1983, - serialized_end=2084, -) -_sym_db.RegisterEnumDescriptor(_UPTIMECHECKREGION) - 
-UptimeCheckRegion = enum_type_wrapper.EnumTypeWrapper(_UPTIMECHECKREGION) -_GROUPRESOURCETYPE = _descriptor.EnumDescriptor( - name="GroupResourceType", - full_name="google.monitoring.v3.GroupResourceType", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="RESOURCE_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="INSTANCE", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="AWS_ELB_LOAD_BALANCER", - index=2, - number=2, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2086, - serialized_end=2177, -) -_sym_db.RegisterEnumDescriptor(_GROUPRESOURCETYPE) - -GroupResourceType = enum_type_wrapper.EnumTypeWrapper(_GROUPRESOURCETYPE) -REGION_UNSPECIFIED = 0 -USA = 1 -EUROPE = 2 -SOUTH_AMERICA = 3 -ASIA_PACIFIC = 4 -RESOURCE_TYPE_UNSPECIFIED = 0 -INSTANCE = 1 -AWS_ELB_LOAD_BALANCER = 2 - - -_INTERNALCHECKER_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.monitoring.v3.InternalChecker.State", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="UNSPECIFIED", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CREATING", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=316, - serialized_end=367, -) -_sym_db.RegisterEnumDescriptor(_INTERNALCHECKER_STATE) - -_UPTIMECHECKCONFIG_CONTENTMATCHER_CONTENTMATCHEROPTION = _descriptor.EnumDescriptor( - name="ContentMatcherOption", - full_name="google.monitoring.v3.UptimeCheckConfig.ContentMatcher.ContentMatcherOption", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="CONTENT_MATCHER_OPTION_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="CONTAINS_STRING", - index=1, - number=1, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="NOT_CONTAINS_STRING", - index=2, - number=2, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="MATCHES_REGEX", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="NOT_MATCHES_REGEX", - index=4, - number=4, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1685, - serialized_end=1835, -) -_sym_db.RegisterEnumDescriptor(_UPTIMECHECKCONFIG_CONTENTMATCHER_CONTENTMATCHEROPTION) - - -_INTERNALCHECKER = _descriptor.Descriptor( - name="InternalChecker", - full_name="google.monitoring.v3.InternalChecker", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.InternalChecker.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.monitoring.v3.InternalChecker.display_name", - index=1, - number=2, - 
type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="network", - full_name="google.monitoring.v3.InternalChecker.network", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="gcp_zone", - full_name="google.monitoring.v3.InternalChecker.gcp_zone", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="peer_project_id", - full_name="google.monitoring.v3.InternalChecker.peer_project_id", - index=4, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="state", - full_name="google.monitoring.v3.InternalChecker.state", - index=5, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_INTERNALCHECKER_STATE], - serialized_options=_b("\030\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=141, - serialized_end=371, -) - - -_UPTIMECHECKCONFIG_RESOURCEGROUP = _descriptor.Descriptor( - name="ResourceGroup", - full_name="google.monitoring.v3.UptimeCheckConfig.ResourceGroup", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="group_id", - full_name="google.monitoring.v3.UptimeCheckConfig.ResourceGroup.group_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource_type", - full_name="google.monitoring.v3.UptimeCheckConfig.ResourceGroup.resource_type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1048, - serialized_end=1145, -) - -_UPTIMECHECKCONFIG_HTTPCHECK_BASICAUTHENTICATION = _descriptor.Descriptor( - name="BasicAuthentication", - full_name="google.monitoring.v3.UptimeCheckConfig.HttpCheck.BasicAuthentication", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="username", - full_name="google.monitoring.v3.UptimeCheckConfig.HttpCheck.BasicAuthentication.username", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="password", - full_name="google.monitoring.v3.UptimeCheckConfig.HttpCheck.BasicAuthentication.password", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1421, - serialized_end=1478, -) - -_UPTIMECHECKCONFIG_HTTPCHECK_HEADERSENTRY = _descriptor.Descriptor( - name="HeadersEntry", - full_name="google.monitoring.v3.UptimeCheckConfig.HttpCheck.HeadersEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.monitoring.v3.UptimeCheckConfig.HttpCheck.HeadersEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.monitoring.v3.UptimeCheckConfig.HttpCheck.HeadersEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1480, - serialized_end=1526, -) - -_UPTIMECHECKCONFIG_HTTPCHECK = _descriptor.Descriptor( - name="HttpCheck", - full_name="google.monitoring.v3.UptimeCheckConfig.HttpCheck", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="use_ssl", - full_name="google.monitoring.v3.UptimeCheckConfig.HttpCheck.use_ssl", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="path", - full_name="google.monitoring.v3.UptimeCheckConfig.HttpCheck.path", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="port", - full_name="google.monitoring.v3.UptimeCheckConfig.HttpCheck.port", - index=2, - number=3, - 
type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="auth_info", - full_name="google.monitoring.v3.UptimeCheckConfig.HttpCheck.auth_info", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="mask_headers", - full_name="google.monitoring.v3.UptimeCheckConfig.HttpCheck.mask_headers", - index=4, - number=5, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="headers", - full_name="google.monitoring.v3.UptimeCheckConfig.HttpCheck.headers", - index=5, - number=6, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="validate_ssl", - full_name="google.monitoring.v3.UptimeCheckConfig.HttpCheck.validate_ssl", - index=6, - number=7, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _UPTIMECHECKCONFIG_HTTPCHECK_BASICAUTHENTICATION, - _UPTIMECHECKCONFIG_HTTPCHECK_HEADERSENTRY, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1148, - serialized_end=1526, -) - -_UPTIMECHECKCONFIG_TCPCHECK = _descriptor.Descriptor( - name="TcpCheck", - full_name="google.monitoring.v3.UptimeCheckConfig.TcpCheck", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="port", - full_name="google.monitoring.v3.UptimeCheckConfig.TcpCheck.port", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1528, - serialized_end=1552, -) - -_UPTIMECHECKCONFIG_CONTENTMATCHER = _descriptor.Descriptor( - name="ContentMatcher", - full_name="google.monitoring.v3.UptimeCheckConfig.ContentMatcher", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="content", - full_name="google.monitoring.v3.UptimeCheckConfig.ContentMatcher.content", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="matcher", - full_name="google.monitoring.v3.UptimeCheckConfig.ContentMatcher.matcher", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_UPTIMECHECKCONFIG_CONTENTMATCHER_CONTENTMATCHEROPTION], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1555, - serialized_end=1835, -) - -_UPTIMECHECKCONFIG = _descriptor.Descriptor( - name="UptimeCheckConfig", - full_name="google.monitoring.v3.UptimeCheckConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.UptimeCheckConfig.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.monitoring.v3.UptimeCheckConfig.display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="monitored_resource", - full_name="google.monitoring.v3.UptimeCheckConfig.monitored_resource", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource_group", - full_name="google.monitoring.v3.UptimeCheckConfig.resource_group", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="http_check", - full_name="google.monitoring.v3.UptimeCheckConfig.http_check", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="tcp_check", - full_name="google.monitoring.v3.UptimeCheckConfig.tcp_check", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="period", - full_name="google.monitoring.v3.UptimeCheckConfig.period", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="timeout", - full_name="google.monitoring.v3.UptimeCheckConfig.timeout", - index=7, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="content_matchers", - full_name="google.monitoring.v3.UptimeCheckConfig.content_matchers", - index=8, - number=9, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="selected_regions", - full_name="google.monitoring.v3.UptimeCheckConfig.selected_regions", - index=9, - number=10, - type=14, - cpp_type=8, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="is_internal", - full_name="google.monitoring.v3.UptimeCheckConfig.is_internal", - index=10, - number=15, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="internal_checkers", - full_name="google.monitoring.v3.UptimeCheckConfig.internal_checkers", - index=11, - number=14, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _UPTIMECHECKCONFIG_RESOURCEGROUP, - _UPTIMECHECKCONFIG_HTTPCHECK, - _UPTIMECHECKCONFIG_TCPCHECK, - _UPTIMECHECKCONFIG_CONTENTMATCHER, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="resource", - full_name="google.monitoring.v3.UptimeCheckConfig.resource", - index=0, - containing_type=None, - fields=[], - ), - _descriptor.OneofDescriptor( - name="check_request_type", - full_name="google.monitoring.v3.UptimeCheckConfig.check_request_type", - index=1, - containing_type=None, - fields=[], - ), - ], - serialized_start=374, - serialized_end=1869, -) - - -_UPTIMECHECKIP = _descriptor.Descriptor( - name="UptimeCheckIp", - full_name="google.monitoring.v3.UptimeCheckIp", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="region", - full_name="google.monitoring.v3.UptimeCheckIp.region", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="location", - full_name="google.monitoring.v3.UptimeCheckIp.location", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="ip_address", - full_name="google.monitoring.v3.UptimeCheckIp.ip_address", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1871, - serialized_end=1981, -) - -_INTERNALCHECKER.fields_by_name["state"].enum_type = _INTERNALCHECKER_STATE -_INTERNALCHECKER_STATE.containing_type = _INTERNALCHECKER -_UPTIMECHECKCONFIG_RESOURCEGROUP.fields_by_name[ - "resource_type" -].enum_type = _GROUPRESOURCETYPE -_UPTIMECHECKCONFIG_RESOURCEGROUP.containing_type = _UPTIMECHECKCONFIG -_UPTIMECHECKCONFIG_HTTPCHECK_BASICAUTHENTICATION.containing_type = ( - _UPTIMECHECKCONFIG_HTTPCHECK -) -_UPTIMECHECKCONFIG_HTTPCHECK_HEADERSENTRY.containing_type = _UPTIMECHECKCONFIG_HTTPCHECK -_UPTIMECHECKCONFIG_HTTPCHECK.fields_by_name[ - "auth_info" -].message_type = _UPTIMECHECKCONFIG_HTTPCHECK_BASICAUTHENTICATION -_UPTIMECHECKCONFIG_HTTPCHECK.fields_by_name[ - "headers" -].message_type = _UPTIMECHECKCONFIG_HTTPCHECK_HEADERSENTRY -_UPTIMECHECKCONFIG_HTTPCHECK.containing_type = _UPTIMECHECKCONFIG -_UPTIMECHECKCONFIG_TCPCHECK.containing_type = _UPTIMECHECKCONFIG -_UPTIMECHECKCONFIG_CONTENTMATCHER.fields_by_name[ - "matcher" -].enum_type = _UPTIMECHECKCONFIG_CONTENTMATCHER_CONTENTMATCHEROPTION -_UPTIMECHECKCONFIG_CONTENTMATCHER.containing_type = _UPTIMECHECKCONFIG -_UPTIMECHECKCONFIG_CONTENTMATCHER_CONTENTMATCHEROPTION.containing_type = ( - _UPTIMECHECKCONFIG_CONTENTMATCHER -) -_UPTIMECHECKCONFIG.fields_by_name[ - "monitored_resource" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE -_UPTIMECHECKCONFIG.fields_by_name[ - "resource_group" -].message_type = _UPTIMECHECKCONFIG_RESOURCEGROUP -_UPTIMECHECKCONFIG.fields_by_name[ - "http_check" -].message_type = _UPTIMECHECKCONFIG_HTTPCHECK -_UPTIMECHECKCONFIG.fields_by_name[ - "tcp_check" -].message_type = _UPTIMECHECKCONFIG_TCPCHECK -_UPTIMECHECKCONFIG.fields_by_name[ - "period" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_UPTIMECHECKCONFIG.fields_by_name[ - "timeout" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_UPTIMECHECKCONFIG.fields_by_name[ - "content_matchers" -].message_type = _UPTIMECHECKCONFIG_CONTENTMATCHER -_UPTIMECHECKCONFIG.fields_by_name["selected_regions"].enum_type = _UPTIMECHECKREGION -_UPTIMECHECKCONFIG.fields_by_name["internal_checkers"].message_type = _INTERNALCHECKER -_UPTIMECHECKCONFIG.oneofs_by_name["resource"].fields.append( - _UPTIMECHECKCONFIG.fields_by_name["monitored_resource"] -) -_UPTIMECHECKCONFIG.fields_by_name[ - "monitored_resource" -].containing_oneof = _UPTIMECHECKCONFIG.oneofs_by_name["resource"] -_UPTIMECHECKCONFIG.oneofs_by_name["resource"].fields.append( - _UPTIMECHECKCONFIG.fields_by_name["resource_group"] -) -_UPTIMECHECKCONFIG.fields_by_name[ - "resource_group" -].containing_oneof = _UPTIMECHECKCONFIG.oneofs_by_name["resource"] -_UPTIMECHECKCONFIG.oneofs_by_name["check_request_type"].fields.append( - 
_UPTIMECHECKCONFIG.fields_by_name["http_check"] -) -_UPTIMECHECKCONFIG.fields_by_name[ - "http_check" -].containing_oneof = _UPTIMECHECKCONFIG.oneofs_by_name["check_request_type"] -_UPTIMECHECKCONFIG.oneofs_by_name["check_request_type"].fields.append( - _UPTIMECHECKCONFIG.fields_by_name["tcp_check"] -) -_UPTIMECHECKCONFIG.fields_by_name[ - "tcp_check" -].containing_oneof = _UPTIMECHECKCONFIG.oneofs_by_name["check_request_type"] -_UPTIMECHECKIP.fields_by_name["region"].enum_type = _UPTIMECHECKREGION -DESCRIPTOR.message_types_by_name["InternalChecker"] = _INTERNALCHECKER -DESCRIPTOR.message_types_by_name["UptimeCheckConfig"] = _UPTIMECHECKCONFIG -DESCRIPTOR.message_types_by_name["UptimeCheckIp"] = _UPTIMECHECKIP -DESCRIPTOR.enum_types_by_name["UptimeCheckRegion"] = _UPTIMECHECKREGION -DESCRIPTOR.enum_types_by_name["GroupResourceType"] = _GROUPRESOURCETYPE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -InternalChecker = _reflection.GeneratedProtocolMessageType( - "InternalChecker", - (_message.Message,), - dict( - DESCRIPTOR=_INTERNALCHECKER, - __module__="google.cloud.monitoring_v3.proto.uptime_pb2", - __doc__="""An internal checker allows Uptime checks to run on - private/internal GCP resources. - - - Attributes: - name: - A unique resource name for this InternalChecker. The format - is: ``projects/[PROJECT_ID]/internalCheckers/[INTERNAL_CHECKE - R_ID]``. ``[PROJECT_ID]`` is the Stackdriver Workspace - project for the Uptime check config associated with the - internal checker. - display_name: - The checker's human-readable name. The display name should be - unique within a Stackdriver Workspace in order to make it - easier to identify; however, uniqueness is not enforced. - network: - The `GCP VPC network - `__ where the internal - resource lives (ex: "default"). - gcp_zone: - The GCP zone the Uptime check should egress from. Only - respected for internal Uptime checks, where internal\_network - is specified. - peer_project_id: - The GCP project ID where the internal checker lives. Not - necessary the same as the Workspace project. - state: - The current operational state of the internal checker. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.InternalChecker) - ), -) -_sym_db.RegisterMessage(InternalChecker) - -UptimeCheckConfig = _reflection.GeneratedProtocolMessageType( - "UptimeCheckConfig", - (_message.Message,), - dict( - ResourceGroup=_reflection.GeneratedProtocolMessageType( - "ResourceGroup", - (_message.Message,), - dict( - DESCRIPTOR=_UPTIMECHECKCONFIG_RESOURCEGROUP, - __module__="google.cloud.monitoring_v3.proto.uptime_pb2", - __doc__="""The resource submessage for group checks. It can be used - instead of a monitored resource, when multiple resources are being - monitored. - - - Attributes: - group_id: - The group of resources being monitored. Should be only the - ``[GROUP_ID]``, and not the full-path - ``projects/[PROJECT_ID]/groups/[GROUP_ID]``. - resource_type: - The resource type of the group members. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.UptimeCheckConfig.ResourceGroup) - ), - ), - HttpCheck=_reflection.GeneratedProtocolMessageType( - "HttpCheck", - (_message.Message,), - dict( - BasicAuthentication=_reflection.GeneratedProtocolMessageType( - "BasicAuthentication", - (_message.Message,), - dict( - DESCRIPTOR=_UPTIMECHECKCONFIG_HTTPCHECK_BASICAUTHENTICATION, - __module__="google.cloud.monitoring_v3.proto.uptime_pb2", - __doc__="""The authentication parameters to provide to the specified - resource or URL that requires a username and password. Currently, only - `Basic HTTP authentication `__ is - supported in Uptime checks. - - - Attributes: - username: - The username to use when authenticating with the HTTP server. - password: - The password to use when authenticating with the HTTP server. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.UptimeCheckConfig.HttpCheck.BasicAuthentication) - ), - ), - HeadersEntry=_reflection.GeneratedProtocolMessageType( - "HeadersEntry", - (_message.Message,), - dict( - DESCRIPTOR=_UPTIMECHECKCONFIG_HTTPCHECK_HEADERSENTRY, - __module__="google.cloud.monitoring_v3.proto.uptime_pb2" - # @@protoc_insertion_point(class_scope:google.monitoring.v3.UptimeCheckConfig.HttpCheck.HeadersEntry) - ), - ), - DESCRIPTOR=_UPTIMECHECKCONFIG_HTTPCHECK, - __module__="google.cloud.monitoring_v3.proto.uptime_pb2", - __doc__="""Information involved in an HTTP/HTTPS Uptime check - request. - - - Attributes: - use_ssl: - If ``true``, use HTTPS instead of HTTP to run the check. - path: - Optional (defaults to "/"). The path to the page against which - to run the check. Will be combined with the ``host`` - (specified within the ``monitored_resource``) and ``port`` to - construct the full URL. If the provided path does not begin - with "/", a "/" will be prepended automatically. - port: - Optional (defaults to 80 when ``use_ssl`` is ``false``, and - 443 when ``use_ssl`` is ``true``). The TCP port on the HTTP - server against which to run the check. Will be combined with - host (specified within the ``monitored_resource``) and - ``path`` to construct the full URL. - auth_info: - The authentication information. Optional when creating an HTTP - check; defaults to empty. - mask_headers: - Boolean specifiying whether to encrypt the header information. - Encryption should be specified for any headers related to - authentication that you do not wish to be seen when retrieving - the configuration. The server will be responsible for - encrypting the headers. On Get/List calls, if ``mask_headers`` - is set to ``true`` then the headers will be obscured with - ``******.`` - headers: - The list of headers to send as part of the Uptime check - request. If two headers have the same key and different - values, they should be entered as a single header, with the - value being a comma-separated list of all the desired values - as described at - https://www.w3.org/Protocols/rfc2616/rfc2616.txt (page 31). - Entering two separate headers with the same key in a Create - call will cause the first to be overwritten by the second. The - maximum number of headers allowed is 100. - validate_ssl: - Boolean specifying whether to include SSL certificate - validation as a part of the Uptime check. Only applies to - checks where ``monitored_resource`` is set to ``uptime_url``. - If ``use_ssl`` is ``false``, setting ``validate_ssl`` to - ``true`` has no effect. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.UptimeCheckConfig.HttpCheck) - ), - ), - TcpCheck=_reflection.GeneratedProtocolMessageType( - "TcpCheck", - (_message.Message,), - dict( - DESCRIPTOR=_UPTIMECHECKCONFIG_TCPCHECK, - __module__="google.cloud.monitoring_v3.proto.uptime_pb2", - __doc__="""Information required for a TCP Uptime check request. - - - Attributes: - port: - The TCP port on the server against which to run the check. - Will be combined with host (specified within the - ``monitored_resource``) to construct the full URL. Required. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.UptimeCheckConfig.TcpCheck) - ), - ), - ContentMatcher=_reflection.GeneratedProtocolMessageType( - "ContentMatcher", - (_message.Message,), - dict( - DESCRIPTOR=_UPTIMECHECKCONFIG_CONTENTMATCHER, - __module__="google.cloud.monitoring_v3.proto.uptime_pb2", - __doc__="""Optional. Used to perform content matching. This allows - matching based on substrings and regular expressions, together with - their negations. Only the first 4 MB of an HTTP or HTTPS check's - response (and the first 1 MB of a TCP check's response) are examined for - purposes of content matching. - - - Attributes: - content: - String or regex content to match. Maximum 1024 bytes. An empty - ``content`` string indicates no content matching is to be - performed. - matcher: - The type of content matcher that will be applied to the server - output, compared to the ``content`` string when the check is - run. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.UptimeCheckConfig.ContentMatcher) - ), - ), - DESCRIPTOR=_UPTIMECHECKCONFIG, - __module__="google.cloud.monitoring_v3.proto.uptime_pb2", - __doc__="""This message configures which resources and services to - monitor for availability. - - - Attributes: - name: - A unique resource name for this Uptime check configuration. - The format is: ``projects/[PROJECT_ID]/uptimeCheckConfigs/[UP - TIME_CHECK_ID]``. This field should be omitted when creating - the Uptime check configuration; on create, the resource name - is assigned by the server and included in the response. - display_name: - A human-friendly name for the Uptime check configuration. The - display name should be unique within a Stackdriver Workspace - in order to make it easier to identify; however, uniqueness is - not enforced. Required. - resource: - The resource the check is checking. Required. - monitored_resource: - The `monitored resource - `__ - associated with the configuration. The following monitored - resource types are supported for Uptime checks: - ``uptime_url``, ``gce_instance``, ``gae_app``, - ``aws_ec2_instance``, ``aws_elb_load_balancer`` - resource_group: - The group resource associated with the configuration. - check_request_type: - The type of Uptime check request. - http_check: - Contains information needed to make an HTTP or HTTPS check. - tcp_check: - Contains information needed to make a TCP check. - period: - How often, in seconds, the Uptime check is performed. - Currently, the only supported values are ``60s`` (1 minute), - ``300s`` (5 minutes), ``600s`` (10 minutes), and ``900s`` (15 - minutes). Optional, defaults to ``60s``. - timeout: - The maximum amount of time to wait for the request to complete - (must be between 1 and 60 seconds). Required. - content_matchers: - The content that is expected to appear in the data returned by - the target server against which the check is run. 
Currently, - only the first entry in the ``content_matchers`` list is - supported, and additional entries will be ignored. This field - is optional and should only be specified if a content match is - required as part of the/ Uptime check. - selected_regions: - The list of regions from which the check will be run. Some - regions contain one location, and others contain more than - one. If this field is specified, enough regions must be - provided to include a minimum of 3 locations. Not specifying - this field will result in Uptime checks running from all - available regions. - is_internal: - If this is ``true``, then checks are made only from the - 'internal\_checkers'. If it is ``false``, then checks are made - only from the 'selected\_regions'. It is an error to provide - 'selected\_regions' when is\_internal is ``true``, or to - provide 'internal\_checkers' when is\_internal is ``false``. - internal_checkers: - The internal checkers that this check will egress from. If - ``is_internal`` is ``true`` and this list is empty, the check - will egress from all the InternalCheckers configured for the - project that owns this ``UptimeCheckConfig``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.UptimeCheckConfig) - ), -) -_sym_db.RegisterMessage(UptimeCheckConfig) -_sym_db.RegisterMessage(UptimeCheckConfig.ResourceGroup) -_sym_db.RegisterMessage(UptimeCheckConfig.HttpCheck) -_sym_db.RegisterMessage(UptimeCheckConfig.HttpCheck.BasicAuthentication) -_sym_db.RegisterMessage(UptimeCheckConfig.HttpCheck.HeadersEntry) -_sym_db.RegisterMessage(UptimeCheckConfig.TcpCheck) -_sym_db.RegisterMessage(UptimeCheckConfig.ContentMatcher) - -UptimeCheckIp = _reflection.GeneratedProtocolMessageType( - "UptimeCheckIp", - (_message.Message,), - dict( - DESCRIPTOR=_UPTIMECHECKIP, - __module__="google.cloud.monitoring_v3.proto.uptime_pb2", - __doc__="""Contains the region, location, and list of IP addresses - where checkers in the location run from. - - - Attributes: - region: - A broad region category in which the IP address is located. - location: - A more specific location within the region that typically - encodes a particular city/town/metro (and its containing - state/province or country) within the broader umbrella region - category. - ip_address: - The IP address from which the Uptime check originates. This is - a fully specified IP address (not an IP address range). Most - IP addresses, as of this publication, are in IPv4 format; - however, one should not rely on the IP addresses being in IPv4 - format indefinitely, and should support interpreting this - field in either IPv4 or IPv6 format. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.UptimeCheckIp) - ), -) -_sym_db.RegisterMessage(UptimeCheckIp) - - -DESCRIPTOR._options = None -_INTERNALCHECKER._options = None -_UPTIMECHECKCONFIG_HTTPCHECK_HEADERSENTRY._options = None -_UPTIMECHECKCONFIG.fields_by_name["is_internal"]._options = None -_UPTIMECHECKCONFIG.fields_by_name["internal_checkers"]._options = None -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/uptime_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/uptime_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/uptime_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-import grpc diff --git a/monitoring/google/cloud/monitoring_v3/proto/uptime_service.proto b/monitoring/google/cloud/monitoring_v3/proto/uptime_service.proto deleted file mode 100644 index 203db1864112..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/uptime_service.proto +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.monitoring.v3; - -import "google/api/annotations.proto"; -import "google/monitoring/v3/uptime.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/api/client.proto"; - -option csharp_namespace = "Google.Cloud.Monitoring.V3"; -option go_package = "google.golang.org/genproto/googleapis/monitoring/v3;monitoring"; -option java_multiple_files = true; -option java_outer_classname = "UptimeServiceProto"; -option java_package = "com.google.monitoring.v3"; -option php_namespace = "Google\\Cloud\\Monitoring\\V3"; - -// The UptimeCheckService API is used to manage (list, create, delete, edit) -// Uptime check configurations in the Stackdriver Monitoring product. An Uptime -// check is a piece of configuration that determines which resources and -// services to monitor for availability. These configurations can also be -// configured interactively by navigating to the [Cloud Console] -// (http://console.cloud.google.com), selecting the appropriate project, -// clicking on "Monitoring" on the left-hand side to navigate to Stackdriver, -// and then clicking on "Uptime". -service UptimeCheckService { - option (google.api.default_host) = "monitoring.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/monitoring," - "https://www.googleapis.com/auth/monitoring.read"; - - // Lists the existing valid Uptime check configurations for the project - // (leaving out any invalid configurations). - rpc ListUptimeCheckConfigs(ListUptimeCheckConfigsRequest) returns (ListUptimeCheckConfigsResponse) { - option (google.api.http) = { - get: "/v3/{parent=projects/*}/uptimeCheckConfigs" - }; - } - - // Gets a single Uptime check configuration. - rpc GetUptimeCheckConfig(GetUptimeCheckConfigRequest) returns (UptimeCheckConfig) { - option (google.api.http) = { - get: "/v3/{name=projects/*/uptimeCheckConfigs/*}" - }; - } - - // Creates a new Uptime check configuration. - rpc CreateUptimeCheckConfig(CreateUptimeCheckConfigRequest) returns (UptimeCheckConfig) { - option (google.api.http) = { - post: "/v3/{parent=projects/*}/uptimeCheckConfigs" - body: "uptime_check_config" - }; - } - - // Updates an Uptime check configuration. You can either replace the entire - // configuration with a new one or replace only certain fields in the current - // configuration by specifying the fields to be updated via `updateMask`. - // Returns the updated configuration. 
- rpc UpdateUptimeCheckConfig(UpdateUptimeCheckConfigRequest) returns (UptimeCheckConfig) { - option (google.api.http) = { - patch: "/v3/{uptime_check_config.name=projects/*/uptimeCheckConfigs/*}" - body: "uptime_check_config" - }; - } - - // Deletes an Uptime check configuration. Note that this method will fail - // if the Uptime check configuration is referenced by an alert policy or - // other dependent configs that would be rendered invalid by the deletion. - rpc DeleteUptimeCheckConfig(DeleteUptimeCheckConfigRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v3/{name=projects/*/uptimeCheckConfigs/*}" - }; - } - - // Returns the list of IP addresses that checkers run from - rpc ListUptimeCheckIps(ListUptimeCheckIpsRequest) returns (ListUptimeCheckIpsResponse) { - option (google.api.http) = { - get: "/v3/uptimeCheckIps" - }; - } -} - -// The protocol for the `ListUptimeCheckConfigs` request. -message ListUptimeCheckConfigsRequest { - // The project whose Uptime check configurations are listed. The format - // is `projects/[PROJECT_ID]`. - string parent = 1; - - // The maximum number of results to return in a single response. The server - // may further constrain the maximum number of results returned in a single - // page. If the page_size is <=0, the server will decide the number of results - // to be returned. - int32 page_size = 3; - - // If this field is not empty then it must contain the `nextPageToken` value - // returned by a previous call to this method. Using this field causes the - // method to return more results from the previous method call. - string page_token = 4; -} - -// The protocol for the `ListUptimeCheckConfigs` response. -message ListUptimeCheckConfigsResponse { - // The returned Uptime check configurations. - repeated UptimeCheckConfig uptime_check_configs = 1; - - // This field represents the pagination token to retrieve the next page of - // results. If the value is empty, it means no further results for the - // request. To retrieve the next page of results, the value of the - // next_page_token is passed to the subsequent List method call (in the - // request message's page_token field). - string next_page_token = 2; - - // The total number of Uptime check configurations for the project, - // irrespective of any pagination. - int32 total_size = 3; -} - -// The protocol for the `GetUptimeCheckConfig` request. -message GetUptimeCheckConfigRequest { - // The Uptime check configuration to retrieve. The format - // is `projects/[PROJECT_ID]/uptimeCheckConfigs/[UPTIME_CHECK_ID]`. - string name = 1; -} - -// The protocol for the `CreateUptimeCheckConfig` request. -message CreateUptimeCheckConfigRequest { - // The project in which to create the Uptime check. The format - // is `projects/[PROJECT_ID]`. - string parent = 1; - - // The new Uptime check configuration. - UptimeCheckConfig uptime_check_config = 2; -} - -// The protocol for the `UpdateUptimeCheckConfig` request. -message UpdateUptimeCheckConfigRequest { - // Optional. If present, only the listed fields in the current Uptime check - // configuration are updated with values from the new configuration. If this - // field is empty, then the current configuration is completely replaced with - // the new configuration. - google.protobuf.FieldMask update_mask = 2; - - // Required. If an `updateMask` has been specified, this field gives - // the values for the set of fields mentioned in the `updateMask`. 
If an - // `updateMask` has not been given, this Uptime check configuration replaces - // the current configuration. If a field is mentioned in `updateMask` but - // the corresonding field is omitted in this partial Uptime check - // configuration, it has the effect of deleting/clearing the field from the - // configuration on the server. - // - // The following fields can be updated: `display_name`, - // `http_check`, `tcp_check`, `timeout`, `content_matchers`, and - // `selected_regions`. - UptimeCheckConfig uptime_check_config = 3; -} - -// The protocol for the `DeleteUptimeCheckConfig` request. -message DeleteUptimeCheckConfigRequest { - // The Uptime check configuration to delete. The format - // is `projects/[PROJECT_ID]/uptimeCheckConfigs/[UPTIME_CHECK_ID]`. - string name = 1; -} - -// The protocol for the `ListUptimeCheckIps` request. -message ListUptimeCheckIpsRequest { - // The maximum number of results to return in a single response. The server - // may further constrain the maximum number of results returned in a single - // page. If the page_size is <=0, the server will decide the number of results - // to be returned. - // NOTE: this field is not yet implemented - int32 page_size = 2; - - // If this field is not empty then it must contain the `nextPageToken` value - // returned by a previous call to this method. Using this field causes the - // method to return more results from the previous method call. - // NOTE: this field is not yet implemented - string page_token = 3; -} - -// The protocol for the `ListUptimeCheckIps` response. -message ListUptimeCheckIpsResponse { - // The returned list of IP addresses (including region and location) that the - // checkers run from. - repeated UptimeCheckIp uptime_check_ips = 1; - - // This field represents the pagination token to retrieve the next page of - // results. If the value is empty, it means no further results for the - // request. To retrieve the next page of results, the value of the - // next_page_token is passed to the subsequent List method call (in the - // request message's page_token field). - // NOTE: this field is not yet implemented - string next_page_token = 2; -} diff --git a/monitoring/google/cloud/monitoring_v3/proto/uptime_service_pb2.py b/monitoring/google/cloud/monitoring_v3/proto/uptime_service_pb2.py deleted file mode 100644 index 1cdf48be0bab..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/uptime_service_pb2.py +++ /dev/null @@ -1,840 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
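The ``updateMask`` semantics spelled out above carry over directly into the generated request message that the removed ``uptime_service_pb2`` module defines. A minimal, hypothetical sketch of a partial update touching only ``display_name`` (the module paths and field names come from this patch; the resource names are placeholders)::

    from google.protobuf import field_mask_pb2
    from google.cloud.monitoring_v3.proto import uptime_pb2, uptime_service_pb2

    # Only display_name is replaced; every other field keeps its current value.
    request = uptime_service_pb2.UpdateUptimeCheckConfigRequest(
        uptime_check_config=uptime_pb2.UptimeCheckConfig(
            name="projects/[PROJECT_ID]/uptimeCheckConfigs/[UPTIME_CHECK_ID]",
            display_name="Renamed uptime check",
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )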
-# source: google/cloud/monitoring_v3/proto/uptime_service.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.monitoring_v3.proto import ( - uptime_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_v3/proto/uptime_service.proto", - package="google.monitoring.v3", - syntax="proto3", - serialized_options=_b( - "\n\030com.google.monitoring.v3B\022UptimeServiceProtoP\001Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\252\002\032Google.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\Monitoring\\V3" - ), - serialized_pb=_b( - '\n5google/cloud/monitoring_v3/proto/uptime_service.proto\x12\x14google.monitoring.v3\x1a\x1cgoogle/api/annotations.proto\x1a-google/cloud/monitoring_v3/proto/uptime.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/api/client.proto"V\n\x1dListUptimeCheckConfigsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"\x94\x01\n\x1eListUptimeCheckConfigsResponse\x12\x45\n\x14uptime_check_configs\x18\x01 \x03(\x0b\x32\'.google.monitoring.v3.UptimeCheckConfig\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x12\x12\n\ntotal_size\x18\x03 \x01(\x05"+\n\x1bGetUptimeCheckConfigRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"v\n\x1e\x43reateUptimeCheckConfigRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x44\n\x13uptime_check_config\x18\x02 \x01(\x0b\x32\'.google.monitoring.v3.UptimeCheckConfig"\x97\x01\n\x1eUpdateUptimeCheckConfigRequest\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x44\n\x13uptime_check_config\x18\x03 \x01(\x0b\x32\'.google.monitoring.v3.UptimeCheckConfig".\n\x1e\x44\x65leteUptimeCheckConfigRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"B\n\x19ListUptimeCheckIpsRequest\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"t\n\x1aListUptimeCheckIpsResponse\x12=\n\x10uptime_check_ips\x18\x01 \x03(\x0b\x32#.google.monitoring.v3.UptimeCheckIp\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t2\xf3\t\n\x12UptimeCheckService\x12\xb7\x01\n\x16ListUptimeCheckConfigs\x12\x33.google.monitoring.v3.ListUptimeCheckConfigsRequest\x1a\x34.google.monitoring.v3.ListUptimeCheckConfigsResponse"2\x82\xd3\xe4\x93\x02,\x12*/v3/{parent=projects/*}/uptimeCheckConfigs\x12\xa6\x01\n\x14GetUptimeCheckConfig\x12\x31.google.monitoring.v3.GetUptimeCheckConfigRequest\x1a\'.google.monitoring.v3.UptimeCheckConfig"2\x82\xd3\xe4\x93\x02,\x12*/v3/{name=projects/*/uptimeCheckConfigs/*}\x12\xc1\x01\n\x17\x43reateUptimeCheckConfig\x12\x34.google.monitoring.v3.CreateUptimeCheckConfigRequest\x1a\'.google.monitoring.v3.UptimeCheckConfig"G\x82\xd3\xe4\x93\x02\x41"*/v3/{parent=projects/*}/uptimeCheckConfigs:\x13uptime_check_config\x12\xd5\x01\n\x17UpdateUptimeCheckConfig\x12\x34.google.monitoring.v3.UpdateUptimeCheckConfigRequest\x1a\'.google.monitoring.v3.UptimeCheckConfig"[\x82\xd3\xe4\x93\x02U2>/v3/{uptime_check_config.name=projects/*/uptimeCheckConfigs/*}:\x13uptime_check_config\x12\x9b\x01\n\x17\x44\x65leteUptimeCheckConfig\x12\x34.google.monitoring.v3.DeleteUptimeCheckConfigRequest\x1a\x16.google.protobuf.Empty"2\x82\xd3\xe4\x93\x02,**/v3/{name=projects/*/uptimeCheckConfigs/*}\x12\x93\x01\n\x12ListUptimeCheckIps\x12/.google.monitoring.v3.ListUptimeCheckIpsRequest\x1a\x30.google.monitoring.v3.ListUptimeCheckIpsResponse"\x1a\x82\xd3\xe4\x93\x02\x14\x12\x12/v3/uptimeCheckIps\x1a\xa9\x01\xca\x41\x19monitoring.googleapis.com\xd2\x41\x89\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.readB\xaa\x01\n\x18\x63om.google.monitoring.v3B\x12UptimeServiceProtoP\x01Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring\xaa\x02\x1aGoogle.Cloud.Monitoring.V3\xca\x02\x1aGoogle\\Cloud\\Monitoring\\V3b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_LISTUPTIMECHECKCONFIGSREQUEST = _descriptor.Descriptor( - name="ListUptimeCheckConfigsRequest", - full_name="google.monitoring.v3.ListUptimeCheckConfigsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.monitoring.v3.ListUptimeCheckConfigsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.monitoring.v3.ListUptimeCheckConfigsRequest.page_size", - index=1, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.monitoring.v3.ListUptimeCheckConfigsRequest.page_token", - index=2, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=276, - serialized_end=362, -) - - -_LISTUPTIMECHECKCONFIGSRESPONSE = _descriptor.Descriptor( - name="ListUptimeCheckConfigsResponse", - full_name="google.monitoring.v3.ListUptimeCheckConfigsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="uptime_check_configs", - full_name="google.monitoring.v3.ListUptimeCheckConfigsResponse.uptime_check_configs", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.monitoring.v3.ListUptimeCheckConfigsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="total_size", - full_name="google.monitoring.v3.ListUptimeCheckConfigsResponse.total_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=365, - serialized_end=513, -) - - -_GETUPTIMECHECKCONFIGREQUEST = _descriptor.Descriptor( - name="GetUptimeCheckConfigRequest", - full_name="google.monitoring.v3.GetUptimeCheckConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.GetUptimeCheckConfigRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=515, - serialized_end=558, -) - - -_CREATEUPTIMECHECKCONFIGREQUEST = _descriptor.Descriptor( - name="CreateUptimeCheckConfigRequest", - full_name="google.monitoring.v3.CreateUptimeCheckConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.monitoring.v3.CreateUptimeCheckConfigRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="uptime_check_config", - 
full_name="google.monitoring.v3.CreateUptimeCheckConfigRequest.uptime_check_config", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=560, - serialized_end=678, -) - - -_UPDATEUPTIMECHECKCONFIGREQUEST = _descriptor.Descriptor( - name="UpdateUptimeCheckConfigRequest", - full_name="google.monitoring.v3.UpdateUptimeCheckConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.monitoring.v3.UpdateUptimeCheckConfigRequest.update_mask", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="uptime_check_config", - full_name="google.monitoring.v3.UpdateUptimeCheckConfigRequest.uptime_check_config", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=681, - serialized_end=832, -) - - -_DELETEUPTIMECHECKCONFIGREQUEST = _descriptor.Descriptor( - name="DeleteUptimeCheckConfigRequest", - full_name="google.monitoring.v3.DeleteUptimeCheckConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.v3.DeleteUptimeCheckConfigRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=834, - serialized_end=880, -) - - -_LISTUPTIMECHECKIPSREQUEST = _descriptor.Descriptor( - name="ListUptimeCheckIpsRequest", - full_name="google.monitoring.v3.ListUptimeCheckIpsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.monitoring.v3.ListUptimeCheckIpsRequest.page_size", - index=0, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.monitoring.v3.ListUptimeCheckIpsRequest.page_token", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=882, - serialized_end=948, -) - - -_LISTUPTIMECHECKIPSRESPONSE = _descriptor.Descriptor( - name="ListUptimeCheckIpsResponse", - full_name="google.monitoring.v3.ListUptimeCheckIpsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="uptime_check_ips", - full_name="google.monitoring.v3.ListUptimeCheckIpsResponse.uptime_check_ips", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.monitoring.v3.ListUptimeCheckIpsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=950, - serialized_end=1066, -) - -_LISTUPTIMECHECKCONFIGSRESPONSE.fields_by_name[ - "uptime_check_configs" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2._UPTIMECHECKCONFIG -) -_CREATEUPTIMECHECKCONFIGREQUEST.fields_by_name[ - "uptime_check_config" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2._UPTIMECHECKCONFIG -) -_UPDATEUPTIMECHECKCONFIGREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_UPDATEUPTIMECHECKCONFIGREQUEST.fields_by_name[ - "uptime_check_config" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2._UPTIMECHECKCONFIG -) -_LISTUPTIMECHECKIPSRESPONSE.fields_by_name[ - "uptime_check_ips" -].message_type = ( - google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2._UPTIMECHECKIP -) -DESCRIPTOR.message_types_by_name[ - "ListUptimeCheckConfigsRequest" -] = _LISTUPTIMECHECKCONFIGSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListUptimeCheckConfigsResponse" -] = _LISTUPTIMECHECKCONFIGSRESPONSE -DESCRIPTOR.message_types_by_name[ - "GetUptimeCheckConfigRequest" -] = _GETUPTIMECHECKCONFIGREQUEST -DESCRIPTOR.message_types_by_name[ - "CreateUptimeCheckConfigRequest" -] = _CREATEUPTIMECHECKCONFIGREQUEST -DESCRIPTOR.message_types_by_name[ - "UpdateUptimeCheckConfigRequest" -] = _UPDATEUPTIMECHECKCONFIGREQUEST -DESCRIPTOR.message_types_by_name[ - "DeleteUptimeCheckConfigRequest" -] = _DELETEUPTIMECHECKCONFIGREQUEST -DESCRIPTOR.message_types_by_name[ - "ListUptimeCheckIpsRequest" -] = _LISTUPTIMECHECKIPSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListUptimeCheckIpsResponse" -] = _LISTUPTIMECHECKIPSRESPONSE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ListUptimeCheckConfigsRequest = _reflection.GeneratedProtocolMessageType( - "ListUptimeCheckConfigsRequest", - (_message.Message,), - dict( - 
DESCRIPTOR=_LISTUPTIMECHECKCONFIGSREQUEST, - __module__="google.cloud.monitoring_v3.proto.uptime_service_pb2", - __doc__="""The protocol for the ``ListUptimeCheckConfigs`` request. - - - Attributes: - parent: - The project whose Uptime check configurations are listed. The - format is ``projects/[PROJECT_ID]``. - page_size: - The maximum number of results to return in a single response. - The server may further constrain the maximum number of results - returned in a single page. If the page\_size is <=0, the - server will decide the number of results to be returned. - page_token: - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return more - results from the previous method call. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListUptimeCheckConfigsRequest) - ), -) -_sym_db.RegisterMessage(ListUptimeCheckConfigsRequest) - -ListUptimeCheckConfigsResponse = _reflection.GeneratedProtocolMessageType( - "ListUptimeCheckConfigsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTUPTIMECHECKCONFIGSRESPONSE, - __module__="google.cloud.monitoring_v3.proto.uptime_service_pb2", - __doc__="""The protocol for the ``ListUptimeCheckConfigs`` response. - - - Attributes: - uptime_check_configs: - The returned Uptime check configurations. - next_page_token: - This field represents the pagination token to retrieve the - next page of results. If the value is empty, it means no - further results for the request. To retrieve the next page of - results, the value of the next\_page\_token is passed to the - subsequent List method call (in the request message's - page\_token field). - total_size: - The total number of Uptime check configurations for the - project, irrespective of any pagination. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListUptimeCheckConfigsResponse) - ), -) -_sym_db.RegisterMessage(ListUptimeCheckConfigsResponse) - -GetUptimeCheckConfigRequest = _reflection.GeneratedProtocolMessageType( - "GetUptimeCheckConfigRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETUPTIMECHECKCONFIGREQUEST, - __module__="google.cloud.monitoring_v3.proto.uptime_service_pb2", - __doc__="""The protocol for the ``GetUptimeCheckConfig`` request. - - - Attributes: - name: - The Uptime check configuration to retrieve. The format is ``pr - ojects/[PROJECT_ID]/uptimeCheckConfigs/[UPTIME_CHECK_ID]``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.GetUptimeCheckConfigRequest) - ), -) -_sym_db.RegisterMessage(GetUptimeCheckConfigRequest) - -CreateUptimeCheckConfigRequest = _reflection.GeneratedProtocolMessageType( - "CreateUptimeCheckConfigRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATEUPTIMECHECKCONFIGREQUEST, - __module__="google.cloud.monitoring_v3.proto.uptime_service_pb2", - __doc__="""The protocol for the ``CreateUptimeCheckConfig`` request. - - - Attributes: - parent: - The project in which to create the Uptime check. The format is - ``projects/[PROJECT_ID]``. - uptime_check_config: - The new Uptime check configuration. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateUptimeCheckConfigRequest) - ), -) -_sym_db.RegisterMessage(CreateUptimeCheckConfigRequest) - -UpdateUptimeCheckConfigRequest = _reflection.GeneratedProtocolMessageType( - "UpdateUptimeCheckConfigRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATEUPTIMECHECKCONFIGREQUEST, - __module__="google.cloud.monitoring_v3.proto.uptime_service_pb2", - __doc__="""The protocol for the ``UpdateUptimeCheckConfig`` request. - - - Attributes: - update_mask: - Optional. If present, only the listed fields in the current - Uptime check configuration are updated with values from the - new configuration. If this field is empty, then the current - configuration is completely replaced with the new - configuration. - uptime_check_config: - Required. If an ``updateMask`` has been specified, this field - gives the values for the set of fields mentioned in the - ``updateMask``. If an ``updateMask`` has not been given, this - Uptime check configuration replaces the current configuration. - If a field is mentioned in ``updateMask`` but the corresonding - field is omitted in this partial Uptime check configuration, - it has the effect of deleting/clearing the field from the - configuration on the server. The following fields can be - updated: ``display_name``, ``http_check``, ``tcp_check``, - ``timeout``, ``content_matchers``, and ``selected_regions``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.UpdateUptimeCheckConfigRequest) - ), -) -_sym_db.RegisterMessage(UpdateUptimeCheckConfigRequest) - -DeleteUptimeCheckConfigRequest = _reflection.GeneratedProtocolMessageType( - "DeleteUptimeCheckConfigRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEUPTIMECHECKCONFIGREQUEST, - __module__="google.cloud.monitoring_v3.proto.uptime_service_pb2", - __doc__="""The protocol for the ``DeleteUptimeCheckConfig`` request. - - - Attributes: - name: - The Uptime check configuration to delete. The format is ``proj - ects/[PROJECT_ID]/uptimeCheckConfigs/[UPTIME_CHECK_ID]``. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.DeleteUptimeCheckConfigRequest) - ), -) -_sym_db.RegisterMessage(DeleteUptimeCheckConfigRequest) - -ListUptimeCheckIpsRequest = _reflection.GeneratedProtocolMessageType( - "ListUptimeCheckIpsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTUPTIMECHECKIPSREQUEST, - __module__="google.cloud.monitoring_v3.proto.uptime_service_pb2", - __doc__="""The protocol for the ``ListUptimeCheckIps`` request. - - - Attributes: - page_size: - The maximum number of results to return in a single response. - The server may further constrain the maximum number of results - returned in a single page. If the page\_size is <=0, the - server will decide the number of results to be returned. NOTE: - this field is not yet implemented - page_token: - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return more - results from the previous method call. 
NOTE: this field is not - yet implemented - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListUptimeCheckIpsRequest) - ), -) -_sym_db.RegisterMessage(ListUptimeCheckIpsRequest) - -ListUptimeCheckIpsResponse = _reflection.GeneratedProtocolMessageType( - "ListUptimeCheckIpsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTUPTIMECHECKIPSRESPONSE, - __module__="google.cloud.monitoring_v3.proto.uptime_service_pb2", - __doc__="""The protocol for the ``ListUptimeCheckIps`` response. - - - Attributes: - uptime_check_ips: - The returned list of IP addresses (including region and - location) that the checkers run from. - next_page_token: - This field represents the pagination token to retrieve the - next page of results. If the value is empty, it means no - further results for the request. To retrieve the next page of - results, the value of the next\_page\_token is passed to the - subsequent List method call (in the request message's - page\_token field). NOTE: this field is not yet implemented - """, - # @@protoc_insertion_point(class_scope:google.monitoring.v3.ListUptimeCheckIpsResponse) - ), -) -_sym_db.RegisterMessage(ListUptimeCheckIpsResponse) - - -DESCRIPTOR._options = None - -_UPTIMECHECKSERVICE = _descriptor.ServiceDescriptor( - name="UptimeCheckService", - full_name="google.monitoring.v3.UptimeCheckService", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\031monitoring.googleapis.com\322A\211\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.read" - ), - serialized_start=1069, - serialized_end=2336, - methods=[ - _descriptor.MethodDescriptor( - name="ListUptimeCheckConfigs", - full_name="google.monitoring.v3.UptimeCheckService.ListUptimeCheckConfigs", - index=0, - containing_service=None, - input_type=_LISTUPTIMECHECKCONFIGSREQUEST, - output_type=_LISTUPTIMECHECKCONFIGSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002,\022*/v3/{parent=projects/*}/uptimeCheckConfigs" - ), - ), - _descriptor.MethodDescriptor( - name="GetUptimeCheckConfig", - full_name="google.monitoring.v3.UptimeCheckService.GetUptimeCheckConfig", - index=1, - containing_service=None, - input_type=_GETUPTIMECHECKCONFIGREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2._UPTIMECHECKCONFIG, - serialized_options=_b( - "\202\323\344\223\002,\022*/v3/{name=projects/*/uptimeCheckConfigs/*}" - ), - ), - _descriptor.MethodDescriptor( - name="CreateUptimeCheckConfig", - full_name="google.monitoring.v3.UptimeCheckService.CreateUptimeCheckConfig", - index=2, - containing_service=None, - input_type=_CREATEUPTIMECHECKCONFIGREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2._UPTIMECHECKCONFIG, - serialized_options=_b( - '\202\323\344\223\002A"*/v3/{parent=projects/*}/uptimeCheckConfigs:\023uptime_check_config' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateUptimeCheckConfig", - full_name="google.monitoring.v3.UptimeCheckService.UpdateUptimeCheckConfig", - index=3, - containing_service=None, - input_type=_UPDATEUPTIMECHECKCONFIGREQUEST, - output_type=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2._UPTIMECHECKCONFIG, - serialized_options=_b( - "\202\323\344\223\002U2>/v3/{uptime_check_config.name=projects/*/uptimeCheckConfigs/*}:\023uptime_check_config" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteUptimeCheckConfig", - full_name="google.monitoring.v3.UptimeCheckService.DeleteUptimeCheckConfig", - 
index=4, - containing_service=None, - input_type=_DELETEUPTIMECHECKCONFIGREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002,**/v3/{name=projects/*/uptimeCheckConfigs/*}" - ), - ), - _descriptor.MethodDescriptor( - name="ListUptimeCheckIps", - full_name="google.monitoring.v3.UptimeCheckService.ListUptimeCheckIps", - index=5, - containing_service=None, - input_type=_LISTUPTIMECHECKIPSREQUEST, - output_type=_LISTUPTIMECHECKIPSRESPONSE, - serialized_options=_b("\202\323\344\223\002\024\022\022/v3/uptimeCheckIps"), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_UPTIMECHECKSERVICE) - -DESCRIPTOR.services_by_name["UptimeCheckService"] = _UPTIMECHECKSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/monitoring/google/cloud/monitoring_v3/proto/uptime_service_pb2_grpc.py b/monitoring/google/cloud/monitoring_v3/proto/uptime_service_pb2_grpc.py deleted file mode 100644 index 9835d01f9e0b..000000000000 --- a/monitoring/google/cloud/monitoring_v3/proto/uptime_service_pb2_grpc.py +++ /dev/null @@ -1,158 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.monitoring_v3.proto import ( - uptime_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2, -) -from google.cloud.monitoring_v3.proto import ( - uptime_service_pb2 as google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class UptimeCheckServiceStub(object): - """The UptimeCheckService API is used to manage (list, create, delete, edit) - Uptime check configurations in the Stackdriver Monitoring product. An Uptime - check is a piece of configuration that determines which resources and - services to monitor for availability. These configurations can also be - configured interactively by navigating to the [Cloud Console] - (http://console.cloud.google.com), selecting the appropriate project, - clicking on "Monitoring" on the left-hand side to navigate to Stackdriver, - and then clicking on "Uptime". - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.ListUptimeCheckConfigs = channel.unary_unary( - "/google.monitoring.v3.UptimeCheckService/ListUptimeCheckConfigs", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.ListUptimeCheckConfigsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.ListUptimeCheckConfigsResponse.FromString, - ) - self.GetUptimeCheckConfig = channel.unary_unary( - "/google.monitoring.v3.UptimeCheckService/GetUptimeCheckConfig", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.GetUptimeCheckConfigRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2.UptimeCheckConfig.FromString, - ) - self.CreateUptimeCheckConfig = channel.unary_unary( - "/google.monitoring.v3.UptimeCheckService/CreateUptimeCheckConfig", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.CreateUptimeCheckConfigRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2.UptimeCheckConfig.FromString, - ) - self.UpdateUptimeCheckConfig = channel.unary_unary( - "/google.monitoring.v3.UptimeCheckService/UpdateUptimeCheckConfig", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.UpdateUptimeCheckConfigRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2.UptimeCheckConfig.FromString, - ) - self.DeleteUptimeCheckConfig = channel.unary_unary( - "/google.monitoring.v3.UptimeCheckService/DeleteUptimeCheckConfig", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.DeleteUptimeCheckConfigRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ListUptimeCheckIps = channel.unary_unary( - "/google.monitoring.v3.UptimeCheckService/ListUptimeCheckIps", - request_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.ListUptimeCheckIpsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.ListUptimeCheckIpsResponse.FromString, - ) - - -class UptimeCheckServiceServicer(object): - """The UptimeCheckService API is used to manage (list, create, delete, edit) - Uptime check configurations in the Stackdriver Monitoring product. An Uptime - check is a piece of configuration that determines which resources and - services to monitor for availability. These configurations can also be - configured interactively by navigating to the [Cloud Console] - (http://console.cloud.google.com), selecting the appropriate project, - clicking on "Monitoring" on the left-hand side to navigate to Stackdriver, - and then clicking on "Uptime". - """ - - def ListUptimeCheckConfigs(self, request, context): - """Lists the existing valid Uptime check configurations for the project - (leaving out any invalid configurations). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetUptimeCheckConfig(self, request, context): - """Gets a single Uptime check configuration. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateUptimeCheckConfig(self, request, context): - """Creates a new Uptime check configuration. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateUptimeCheckConfig(self, request, context): - """Updates an Uptime check configuration. You can either replace the entire - configuration with a new one or replace only certain fields in the current - configuration by specifying the fields to be updated via `updateMask`. - Returns the updated configuration. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteUptimeCheckConfig(self, request, context): - """Deletes an Uptime check configuration. Note that this method will fail - if the Uptime check configuration is referenced by an alert policy or - other dependent configs that would be rendered invalid by the deletion. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListUptimeCheckIps(self, request, context): - """Returns the list of IP addresses that checkers run from - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_UptimeCheckServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "ListUptimeCheckConfigs": grpc.unary_unary_rpc_method_handler( - servicer.ListUptimeCheckConfigs, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.ListUptimeCheckConfigsRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.ListUptimeCheckConfigsResponse.SerializeToString, - ), - "GetUptimeCheckConfig": grpc.unary_unary_rpc_method_handler( - servicer.GetUptimeCheckConfig, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.GetUptimeCheckConfigRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2.UptimeCheckConfig.SerializeToString, - ), - "CreateUptimeCheckConfig": grpc.unary_unary_rpc_method_handler( - servicer.CreateUptimeCheckConfig, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.CreateUptimeCheckConfigRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2.UptimeCheckConfig.SerializeToString, - ), - "UpdateUptimeCheckConfig": grpc.unary_unary_rpc_method_handler( - servicer.UpdateUptimeCheckConfig, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.UpdateUptimeCheckConfigRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__pb2.UptimeCheckConfig.SerializeToString, - ), - "DeleteUptimeCheckConfig": grpc.unary_unary_rpc_method_handler( - servicer.DeleteUptimeCheckConfig, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.DeleteUptimeCheckConfigRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "ListUptimeCheckIps": 
grpc.unary_unary_rpc_method_handler( - servicer.ListUptimeCheckIps, - request_deserializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.ListUptimeCheckIpsRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__v3_dot_proto_dot_uptime__service__pb2.ListUptimeCheckIpsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.monitoring.v3.UptimeCheckService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/monitoring/google/cloud/monitoring_v3/query.py b/monitoring/google/cloud/monitoring_v3/query.py deleted file mode 100644 index 18ea0573515d..000000000000 --- a/monitoring/google/cloud/monitoring_v3/query.py +++ /dev/null @@ -1,625 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Time series query for the `Google Stackdriver Monitoring API (V3)`_. - -.. _Google Stackdriver Monitoring API (V3): - https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\ - projects.timeSeries/list -""" - -import copy -import datetime - -import six - -from google.cloud.monitoring_v3 import _dataframe -from google.cloud.monitoring_v3 import types -from google.cloud.monitoring_v3.gapic import enums - -_UTCNOW = datetime.datetime.utcnow # To be replaced by tests. - - -class Query(object): - """Query object for retrieving metric data. - - :type client: :class:`google.cloud.monitoring_v3.gapic. - metric_service_client.MetricServiceClient` - :param client: The client to use. - - :type project: str - :param project: The project ID or number. - - :type metric_type: str - :param metric_type: The metric type name. The default value is - :data:`Query.DEFAULT_METRIC_TYPE - `, - but please note that this default value is provided only for - demonstration purposes and is subject to change. See the - `supported metrics`_. - - :type end_time: :class:`datetime.datetime` - :param end_time: (Optional) The end time (inclusive) of the time interval - for which results should be returned, as a datetime object. - The default is the start of the current minute. - - The start time (exclusive) is determined by combining the - values of ``days``, ``hours``, and ``minutes``, and - subtracting the resulting duration from the end time. - - It is also allowed to omit the end time and duration here, - in which case - :meth:`~google.cloud.monitoring.query.Query.select_interval` - must be called before the query is executed. - - :type days: int - :param days: The number of days in the time interval. - - :type hours: int - :param hours: The number of hours in the time interval. - - :type minutes: int - :param minutes: The number of minutes in the time interval. - - :raises: :exc:`ValueError` if ``end_time`` is specified but - ``days``, ``hours``, and ``minutes`` are all zero. - If you really want to specify a point in time, use - :meth:`~google.cloud.monitoring.query.Query.select_interval`. - - .. 
_supported metrics: https://cloud.google.com/monitoring/api/metrics - """ - - DEFAULT_METRIC_TYPE = "compute.googleapis.com/instance/cpu/utilization" - - def __init__( - self, - client, - project, - metric_type=DEFAULT_METRIC_TYPE, - end_time=None, - days=0, - hours=0, - minutes=0, - ): - start_time = None - if days or hours or minutes: - if end_time is None: - end_time = _UTCNOW().replace(second=0, microsecond=0) - start_time = end_time - datetime.timedelta( - days=days, hours=hours, minutes=minutes - ) - elif end_time is not None: - raise ValueError("Non-zero duration required for time interval.") - - self._client = client - self._project_path = self._client.project_path(project) - self._end_time = end_time - self._start_time = start_time - self._filter = _Filter(metric_type) - - self._per_series_aligner = 0 - self._alignment_period_seconds = 0 - self._cross_series_reducer = 0 - self._group_by_fields = () - - def __iter__(self): - return self.iter() - - @property - def metric_type(self): - """The metric type name.""" - return self._filter.metric_type - - @property - def filter(self): - """The filter string. - - This is constructed from the metric type, the resource type, and - selectors for the group ID, monitored projects, resource labels, - and metric labels. - """ - return str(self._filter) - - def select_interval(self, end_time, start_time=None): - """Copy the query and set the query time interval. - - Example:: - - import datetime - - now = datetime.datetime.utcnow() - query = query.select_interval( - end_time=now, - start_time=now - datetime.timedelta(minutes=5)) - - As a convenience, you can alternatively specify the end time and - an interval duration when you create the query initially. - - :type end_time: :class:`datetime.datetime` - :param end_time: The end time (inclusive) of the time interval - for which results should be returned, as a datetime object. - - :type start_time: :class:`datetime.datetime` - :param start_time: - (Optional) The start time (exclusive) of the time interval - for which results should be returned, as a datetime object. - If not specified, the interval is a point in time. - - :rtype: :class:`Query` - :returns: The new query object. - """ - new_query = copy.deepcopy(self) - new_query._end_time = end_time - new_query._start_time = start_time - return new_query - - def select_group(self, group_id): - """Copy the query and add filtering by group. - - Example:: - - query = query.select_group('1234567') - - :type group_id: str - :param group_id: The ID of a group to filter by. - - :rtype: :class:`Query` - :returns: The new query object. - """ - new_query = copy.deepcopy(self) - new_query._filter.group_id = group_id - return new_query - - def select_projects(self, *args): - """Copy the query and add filtering by monitored projects. - - This is only useful if the target project represents a Stackdriver - account containing the specified monitored projects. - - Examples:: - - query = query.select_projects('project-1') - query = query.select_projects('project-1', 'project-2') - - :type args: tuple - :param args: Project IDs limiting the resources to be included - in the query. - - :rtype: :class:`Query` - :returns: The new query object. - """ - new_query = copy.deepcopy(self) - new_query._filter.projects = args - return new_query - - def select_resources(self, *args, **kwargs): - """Copy the query and add filtering by resource labels. 
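The ``Query`` constructor and the ``select_*`` helpers above compose into short pipelines. A hypothetical end-to-end sketch, assuming the old ``monitoring_v3.MetricServiceClient`` and a placeholder project id::

    from google.cloud import monitoring_v3
    from google.cloud.monitoring_v3 import query

    client = monitoring_v3.MetricServiceClient()

    # CPU utilization over the last hour, restricted to one zone.
    q = query.Query(
        client,
        "[PROJECT_ID]",
        metric_type="compute.googleapis.com/instance/cpu/utilization",
        hours=1,
    )
    q = q.select_resources(zone="us-central1-a")

    # Iterating executes the request and yields TimeSeries protos.
    for time_series in q:
        print(time_series)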
- - Examples:: - - query = query.select_resources(zone='us-central1-a') - query = query.select_resources(zone_prefix='europe-') - query = query.select_resources(resource_type='gce_instance') - - A keyword argument ``