- On January 1, 2020 this library will no longer support Python 2 on the latest released version. - Previously released library versions will continue to be available. For more information please + As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please visit Python 2 support on Google Cloud.
{% block body %} {% endblock %} diff --git a/packages/google-cloud-monitoring/docs/conf.py b/packages/google-cloud-monitoring/docs/conf.py index ab6d693cc504..c283c715d5e0 100644 --- a/packages/google-cloud-monitoring/docs/conf.py +++ b/packages/google-cloud-monitoring/docs/conf.py @@ -20,12 +20,16 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + __version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "1.5.5" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -35,6 +39,7 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", @@ -90,7 +95,12 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. 
@@ -337,7 +347,7 @@ intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.io/grpc/python/", None), } diff --git a/packages/google-cloud-monitoring/noxfile.py b/packages/google-cloud-monitoring/noxfile.py index fef9e7ba8ecf..481973ccd16b 100644 --- a/packages/google-cloud-monitoring/noxfile.py +++ b/packages/google-cloud-monitoring/noxfile.py @@ -23,11 +23,11 @@ import nox -BLACK_VERSION = "black==19.3b0" +BLACK_VERSION = "black==19.10b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.7" -SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.7"] +DEFAULT_PYTHON_VERSION = "3.8" +SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] @@ -39,7 +39,9 @@ def lint(session): serious code quality issues. """ session.install("flake8", BLACK_VERSION) - session.run("black", "--check", *BLACK_PATHS) + session.run( + "black", "--check", *BLACK_PATHS, + ) session.run("flake8", "google", "tests") @@ -54,7 +56,9 @@ def blacken(session): check the state of the `gcp_ubuntu_config` we use for that Kokoro run. """ session.install(BLACK_VERSION) - session.run("black", *BLACK_PATHS) + session.run( + "black", *BLACK_PATHS, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -96,6 +100,10 @@ def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. 
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") @@ -111,7 +119,9 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install("mock", "pytest", "google-cloud-testutils") + session.install( + "mock", "pytest", "google-cloud-testutils", + ) session.install("-e", ".") # Run py.test against the system tests. @@ -154,3 +164,38 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + # sphinx-docfx-yaml supports up to sphinx version 1.5.5. + # https://github.com/docascode/sphinx-docfx-yaml/issues/97 + session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/packages/google-cloud-monitoring/samples/AUTHORING_GUIDE.md b/packages/google-cloud-monitoring/samples/AUTHORING_GUIDE.md new file mode 100644 index 000000000000..55c97b32f4c1 --- /dev/null +++ b/packages/google-cloud-monitoring/samples/AUTHORING_GUIDE.md @@ -0,0 +1 @@ +See 
https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/packages/google-cloud-monitoring/samples/CONTRIBUTING.md b/packages/google-cloud-monitoring/samples/CONTRIBUTING.md new file mode 100644 index 000000000000..34c882b6f1a3 --- /dev/null +++ b/packages/google-cloud-monitoring/samples/CONTRIBUTING.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/.gitignore b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/.gitignore new file mode 100644 index 000000000000..de0a466d79c3 --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/.gitignore @@ -0,0 +1 @@ +backup.json diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/README.rst b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/README.rst new file mode 100644 index 000000000000..bb59aad5feeb --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/README.rst @@ -0,0 +1,138 @@ +.. This file is automatically generated. Do not edit this file directly. + +Google Stackdriver Alerting API Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/alerts-client/README.rst + + +This directory contains samples for Google Stackdriver Alerting API. 
Stackdriver Monitoring collects metrics, events, and metadata from Google Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, application instrumentation, and a variety of common application components including Cassandra, Nginx, Apache Web Server, Elasticsearch and many others. Stackdriver's Alerting API allows you to create, delete, and make back up copies of your alert policies. + + + + +.. _Google Stackdriver Alerting API: https://cloud.google.com/monitoring/alerts/ + +To run the sample, you need to enable the API at: https://console.cloud.google.com/apis/library/monitoring.googleapis.com + +To run the sample, you need to have `Monitoring Admin` role. + +Please visit [the Cloud Console UI of this API](https://console.cloud.google.com/monitoring) and [create a new Workspace with the same name of your Cloud project](https://cloud.google.com/monitoring/workspaces/create). + + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started + +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. 
Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Snippets ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/alerts-client/snippets.py,monitoring/api/v3/alerts-client/README.rst + + + + +To run this sample: + +.. code-block:: bash + + $ python snippets.py + + usage: snippets.py [-h] + {list-alert-policies,list-notification-channels,enable-alert-policies,disable-alert-policies,replace-notification-channels,backup,restore} + ... + + Demonstrates AlertPolicy API operations. + + positional arguments: + {list-alert-policies,list-notification-channels,enable-alert-policies,disable-alert-policies,replace-notification-channels,backup,restore} + list-alert-policies + list-notification-channels + enable-alert-policies + Enable or disable alert policies in a project. + Arguments: project_name (str) enable (bool): Enable or + disable the policies. filter_ (str, optional): Only + enable/disable alert policies that match this filter_. + See + https://cloud.google.com/monitoring/api/v3/sorting- + and-filtering + disable-alert-policies + Enable or disable alert policies in a project. + Arguments: project_name (str) enable (bool): Enable or + disable the policies. filter_ (str, optional): Only + enable/disable alert policies that match this filter_. 
+ See + https://cloud.google.com/monitoring/api/v3/sorting- + and-filtering + replace-notification-channels + backup + restore + + optional arguments: + -h, --help show this help message and exit + + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/README.rst.in b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/README.rst.in new file mode 100644 index 000000000000..00b280124ea4 --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/README.rst.in @@ -0,0 +1,33 @@ +# This file is used to generate README.rst + +product: + name: Google Stackdriver Alerting API + short_name: Stackdriver Alerting API + url: https://cloud.google.com/monitoring/alerts/ + description: > + Stackdriver Monitoring collects metrics, events, and metadata from Google + Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, + application instrumentation, and a variety of common application + components including Cassandra, Nginx, Apache Web Server, Elasticsearch + and many others. Stackdriver's Alerting API allows you to create, + delete, and make back up copies of your alert policies. 
+ +required_api_url: https://console.cloud.google.com/apis/library/monitoring.googleapis.com +required_role: Monitoring Admin +other_required_steps: > + Please visit [the Cloud Console UI of this + API](https://console.cloud.google.com/monitoring) and create a new + Workspace with the same name of your Cloud project. + +setup: +- auth +- install_deps + +samples: +- name: Snippets + file: snippets.py + show_help: true + +cloud_client_library: true + +folder: monitoring/api/v3/alerts-client diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/noxfile.py b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/noxfile.py new file mode 100644 index 000000000000..ba55d7ce53ca --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/noxfile.py @@ -0,0 +1,224 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. 
The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + 'ignored_versions': ["2.7"], + + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + 'envs': {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append('.') + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars(): + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG['gcloud_project_env'] + # This should error out if not set. + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG['envs']) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] + +# Any default versions that should be ignored. 
+IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir): + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session): + session.install("flake8", "flake8-import-order") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + "." 
+ ] + session.run("flake8", *args) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests(session, post_install=None): + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session): + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) + + +# +# Readmegen +# + + +def _get_repo_root(): + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session, path): + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/noxfile_config.py b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/noxfile_config.py new file mode 100644 index 000000000000..664c58309d70 --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/noxfile_config.py @@ -0,0 +1,42 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be imported from +# the noxfile.py. 
+ +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + 'ignored_versions': ["2.7"], + + # Declare optional test sessions you want to opt-in. Currently we + # have the following optional test sessions: + # 'cloud_run' # Test session for Cloud Run application. + 'opt_in_sessions': [], + + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + # 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + 'envs': {}, +} diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/requirements-test.txt b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/requirements-test.txt new file mode 100644 index 000000000000..ec623710c6ef --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/requirements-test.txt @@ -0,0 +1,3 @@ +pytest==6.0.1 +retrying==1.3.3 +flaky==3.7.0 diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/requirements.txt b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/requirements.txt new file mode 100644 index 000000000000..bc7a2fe57c8b --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/requirements.txt @@ -0,0 +1,2 @@ +google-cloud-monitoring==1.1.0 +tabulate==0.8.7 diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/snippets.py b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/snippets.py new file mode 100644 index 000000000000..80254232e6af --- 
/dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/snippets.py @@ -0,0 +1,343 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import argparse +import json +import os + +from google.cloud import monitoring_v3 +import google.protobuf.json_format +import tabulate + + +# [START monitoring_alert_list_policies] +def list_alert_policies(project_name): + client = monitoring_v3.AlertPolicyServiceClient() + policies = client.list_alert_policies(project_name) + print(tabulate.tabulate( + [(policy.name, policy.display_name) for policy in policies], + ('name', 'display_name'))) +# [END monitoring_alert_list_policies] + + +# [START monitoring_alert_list_channels] +def list_notification_channels(project_name): + client = monitoring_v3.NotificationChannelServiceClient() + channels = client.list_notification_channels(project_name) + print(tabulate.tabulate( + [(channel.name, channel.display_name) for channel in channels], + ('name', 'display_name'))) +# [END monitoring_alert_list_channels] + + +# [START monitoring_alert_enable_policies] +def enable_alert_policies(project_name, enable, filter_=None): + """Enable or disable alert policies in a project. + + Arguments: + project_name (str) + enable (bool): Enable or disable the policies. + filter_ (str, optional): Only enable/disable alert policies that match + this filter_. 
See + https://cloud.google.com/monitoring/api/v3/sorting-and-filtering + """ + + client = monitoring_v3.AlertPolicyServiceClient() + policies = client.list_alert_policies(project_name, filter_=filter_) + + for policy in policies: + if bool(enable) == policy.enabled.value: + print('Policy', policy.name, 'is already', + 'enabled' if policy.enabled.value else 'disabled') + else: + policy.enabled.value = bool(enable) + mask = monitoring_v3.types.field_mask_pb2.FieldMask() + mask.paths.append('enabled') + client.update_alert_policy(policy, mask) + print('Enabled' if enable else 'Disabled', policy.name) +# [END monitoring_alert_enable_policies] + + +# [START monitoring_alert_replace_channels] +def replace_notification_channels(project_name, alert_policy_id, channel_ids): + _, project_id = project_name.split('/') + alert_client = monitoring_v3.AlertPolicyServiceClient() + channel_client = monitoring_v3.NotificationChannelServiceClient() + policy = monitoring_v3.types.alert_pb2.AlertPolicy() + policy.name = alert_client.alert_policy_path(project_id, alert_policy_id) + + for channel_id in channel_ids: + policy.notification_channels.append( + channel_client.notification_channel_path(project_id, channel_id)) + + mask = monitoring_v3.types.field_mask_pb2.FieldMask() + mask.paths.append('notification_channels') + updated_policy = alert_client.update_alert_policy(policy, mask) + print('Updated', updated_policy.name) +# [END monitoring_alert_replace_channels] + + +# [START monitoring_alert_delete_channel] +def delete_notification_channels(project_name, channel_ids, force=None): + channel_client = monitoring_v3.NotificationChannelServiceClient() + for channel_id in channel_ids: + channel_name = '{}/notificationChannels/{}'.format( + project_name, channel_id) + try: + channel_client.delete_notification_channel( + channel_name, force=force) + print('Channel {} deleted'.format(channel_name)) + except ValueError: + print('The parameters are invalid') + except Exception as e: + 
print('API call failed: {}'.format(e)) +# [END monitoring_alert_delete_channel] + + +# [START monitoring_alert_backup_policies] +def backup(project_name, backup_filename): + alert_client = monitoring_v3.AlertPolicyServiceClient() + channel_client = monitoring_v3.NotificationChannelServiceClient() + record = {'project_name': project_name, + 'policies': list(alert_client.list_alert_policies(project_name)), + 'channels': list(channel_client.list_notification_channels( + project_name))} + json.dump(record, open(backup_filename, 'wt'), cls=ProtoEncoder, indent=2) + print('Backed up alert policies and notification channels to {}.'.format( + backup_filename) + ) + + +class ProtoEncoder(json.JSONEncoder): + """Uses google.protobuf.json_format to encode protobufs as json.""" + def default(self, obj): + if type(obj) in (monitoring_v3.types.alert_pb2.AlertPolicy, + monitoring_v3.types.notification_pb2. + NotificationChannel): + text = google.protobuf.json_format.MessageToJson(obj) + return json.loads(text) + return super(ProtoEncoder, self).default(obj) +# [END monitoring_alert_backup_policies] + + +# [START monitoring_alert_restore_policies] +# [START monitoring_alert_create_policy] +# [START monitoring_alert_create_channel] +# [START monitoring_alert_update_channel] +# [START monitoring_alert_enable_channel] +def restore(project_name, backup_filename): + print('Loading alert policies and notification channels from {}.'.format( + backup_filename) + ) + record = json.load(open(backup_filename, 'rt')) + is_same_project = project_name == record['project_name'] + # Convert dicts to AlertPolicies. 
+ policies_json = [json.dumps(policy) for policy in record['policies']] + policies = [google.protobuf.json_format.Parse( + policy_json, monitoring_v3.types.alert_pb2.AlertPolicy()) + for policy_json in policies_json] + # Convert dicts to NotificationChannels + channels_json = [json.dumps(channel) for channel in record['channels']] + channels = [google.protobuf.json_format.Parse( + channel_json, monitoring_v3.types.notification_pb2. + NotificationChannel()) for channel_json in channels_json] + + # Restore the channels. + channel_client = monitoring_v3.NotificationChannelServiceClient() + channel_name_map = {} + + for channel in channels: + updated = False + print('Updating channel', channel.display_name) + # This field is immutable and it is illegal to specify a + # non-default value (UNVERIFIED or VERIFIED) in the + # Create() or Update() operations. + channel.verification_status = monitoring_v3.enums.NotificationChannel.\ + VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED + + if is_same_project: + try: + channel_client.update_notification_channel(channel) + updated = True + except google.api_core.exceptions.NotFound: + pass # The channel was deleted. Create it below. + + if not updated: + # The channel no longer exists. Recreate it. + old_name = channel.name + channel.ClearField("name") + new_channel = channel_client.create_notification_channel( + project_name, channel) + channel_name_map[old_name] = new_channel.name + + # Restore the alerts + alert_client = monitoring_v3.AlertPolicyServiceClient() + + for policy in policies: + print('Updating policy', policy.display_name) + # These two fields cannot be set directly, so clear them. + policy.ClearField('creation_record') + policy.ClearField('mutation_record') + + # Update old channel names with new channel names. 
+ for i, channel in enumerate(policy.notification_channels): + new_channel = channel_name_map.get(channel) + if new_channel: + policy.notification_channels[i] = new_channel + + updated = False + + if is_same_project: + try: + alert_client.update_alert_policy(policy) + updated = True + except google.api_core.exceptions.NotFound: + pass # The policy was deleted. Create it below. + except google.api_core.exceptions.InvalidArgument: + # Annoying that API throws InvalidArgument when the policy + # does not exist. Seems like it should throw NotFound. + pass # The policy was deleted. Create it below. + + if not updated: + # The policy no longer exists. Recreate it. + old_name = policy.name + policy.ClearField("name") + for condition in policy.conditions: + condition.ClearField("name") + policy = alert_client.create_alert_policy(project_name, policy) + print('Updated', policy.name) +# [END monitoring_alert_enable_channel] +# [END monitoring_alert_restore_policies] +# [END monitoring_alert_create_policy] +# [END monitoring_alert_create_channel] +# [END monitoring_alert_update_channel] + + +class MissingProjectIdError(Exception): + pass + + +def project_id(): + """Retrieves the project id from the environment variable. + + Raises: + MissingProjectIdError -- When not set. 
+ + Returns: + str -- the project name + """ + project_id = os.environ['GOOGLE_CLOUD_PROJECT'] + + if not project_id: + raise MissingProjectIdError( + 'Set the environment variable ' + + 'GCLOUD_PROJECT to your Google Cloud Project Id.') + return project_id + + +def project_name(): + return 'projects/' + project_id() + + +if __name__ == '__main__': + + parser = argparse.ArgumentParser( + description='Demonstrates AlertPolicy API operations.') + + subparsers = parser.add_subparsers(dest='command') + + list_alert_policies_parser = subparsers.add_parser( + 'list-alert-policies', + help=list_alert_policies.__doc__ + ) + + list_notification_channels_parser = subparsers.add_parser( + 'list-notification-channels', + help=list_alert_policies.__doc__ + ) + + enable_alert_policies_parser = subparsers.add_parser( + 'enable-alert-policies', + help=enable_alert_policies.__doc__ + ) + enable_alert_policies_parser.add_argument( + '--filter', + ) + + disable_alert_policies_parser = subparsers.add_parser( + 'disable-alert-policies', + help=enable_alert_policies.__doc__ + ) + disable_alert_policies_parser.add_argument( + '--filter', + ) + + replace_notification_channels_parser = subparsers.add_parser( + 'replace-notification-channels', + help=replace_notification_channels.__doc__ + ) + replace_notification_channels_parser.add_argument( + '-p', '--alert_policy_id', + required=True + ) + replace_notification_channels_parser.add_argument( + '-c', '--notification_channel_id', + required=True, + action='append' + ) + + backup_parser = subparsers.add_parser( + 'backup', + help=backup.__doc__ + ) + backup_parser.add_argument( + '--backup_to_filename', + required=True + ) + + restore_parser = subparsers.add_parser( + 'restore', + help=restore.__doc__ + ) + restore_parser.add_argument( + '--restore_from_filename', + required=True + ) + + args = parser.parse_args() + + if args.command == 'list-alert-policies': + list_alert_policies(project_name()) + + elif args.command == 
'list-notification-channels': + list_notification_channels(project_name()) + + elif args.command == 'enable-alert-policies': + enable_alert_policies(project_name(), enable=True, filter_=args.filter) + + elif args.command == 'disable-alert-policies': + enable_alert_policies(project_name(), enable=False, + filter_=args.filter) + + elif args.command == 'replace-notification-channels': + replace_notification_channels(project_name(), args.alert_policy_id, + args.notification_channel_id) + + elif args.command == 'backup': + backup(project_name(), args.backup_to_filename) + + elif args.command == 'restore': + restore(project_name(), args.restore_from_filename) diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/snippets_test.py b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/snippets_test.py new file mode 100644 index 000000000000..550a8dc97596 --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/snippets_test.py @@ -0,0 +1,195 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import print_function + +import random +import string +import time + +from google.api_core.exceptions import Aborted +from google.api_core.exceptions import DeadlineExceeded +from google.api_core.exceptions import NotFound +from google.api_core.exceptions import ServiceUnavailable +from google.cloud import monitoring_v3 +import google.protobuf.json_format +import pytest +from retrying import retry + +import snippets + + +# We assume we have access to good randomness source. +random.seed() + + +def random_name(length): + return ''.join( + [random.choice(string.ascii_lowercase) for i in range(length)]) + + +def retry_on_exceptions(exception): + return isinstance( + exception, (Aborted, ServiceUnavailable, DeadlineExceeded)) + + +def delay_on_aborted(err, *args): + if retry_on_exceptions(err[1]): + # add randomness for avoiding continuous conflict + time.sleep(5 + (random.randint(0, 9) * 0.1)) + return True + return False + + +class PochanFixture: + """A test fixture that creates an alert POlicy and a notification CHANnel, + hence the name, pochan. + """ + + def __init__(self): + self.project_id = snippets.project_id() + self.project_name = snippets.project_name() + self.alert_policy_client = monitoring_v3.AlertPolicyServiceClient() + self.notification_channel_client = ( + monitoring_v3.NotificationChannelServiceClient()) + + def __enter__(self): + @retry(wait_exponential_multiplier=1000, wait_exponential_max=10000, + stop_max_attempt_number=10, + retry_on_exception=retry_on_exceptions) + def setup(): + # Create a policy. + policy = monitoring_v3.types.alert_pb2.AlertPolicy() + json = open('test_alert_policy.json').read() + google.protobuf.json_format.Parse(json, policy) + policy.display_name = 'snippets-test-' + random_name(10) + self.alert_policy = self.alert_policy_client.create_alert_policy( + self.project_name, policy) + # Create a notification channel. 
+ notification_channel = ( + monitoring_v3.types.notification_pb2.NotificationChannel()) + json = open('test_notification_channel.json').read() + google.protobuf.json_format.Parse(json, notification_channel) + notification_channel.display_name = ( + 'snippets-test-' + random_name(10)) + self.notification_channel = ( + self.notification_channel_client.create_notification_channel( + self.project_name, notification_channel)) + setup() + return self + + def __exit__(self, type, value, traceback): + # Delete the policy and channel we created. + @retry(wait_exponential_multiplier=1000, wait_exponential_max=10000, + stop_max_attempt_number=10, + retry_on_exception=retry_on_exceptions) + def teardown(): + try: + self.alert_policy_client.delete_alert_policy( + self.alert_policy.name) + except NotFound: + print("Ignored NotFound when deleting a policy.") + try: + if self.notification_channel.name: + self.notification_channel_client\ + .delete_notification_channel( + self.notification_channel.name) + except NotFound: + print("Ignored NotFound when deleting a channel.") + teardown() + + +@pytest.fixture(scope='session') +def pochan(): + with PochanFixture() as pochan: + yield pochan + + +def test_list_alert_policies(capsys, pochan): + snippets.list_alert_policies(pochan.project_name) + out, _ = capsys.readouterr() + assert pochan.alert_policy.display_name in out + + +@pytest.mark.flaky(rerun_filter=delay_on_aborted, max_runs=5) +def test_enable_alert_policies(capsys, pochan): + # These sleep calls are for mitigating the following error: + # "409 Too many concurrent edits to the project configuration. + # Please try again." + # Having multiple projects will void these `sleep()` calls. 
+ # See also #3310 + time.sleep(2) + snippets.enable_alert_policies(pochan.project_name, True) + out, _ = capsys.readouterr() + assert "Enabled {0}".format(pochan.project_name) in out \ + or "{} is already enabled".format(pochan.alert_policy.name) in out + + time.sleep(2) + snippets.enable_alert_policies(pochan.project_name, False) + out, _ = capsys.readouterr() + assert "Disabled {}".format(pochan.project_name) in out \ + or "{} is already disabled".format(pochan.alert_policy.name) in out + + +@pytest.mark.flaky(rerun_filter=delay_on_aborted, max_runs=5) +def test_replace_channels(capsys, pochan): + alert_policy_id = pochan.alert_policy.name.split('/')[-1] + notification_channel_id = pochan.notification_channel.name.split('/')[-1] + + # This sleep call is for mitigating the following error: + # "409 Too many concurrent edits to the project configuration. + # Please try again." + # Having multiple projects will void this `sleep()` call. + # See also #3310 + time.sleep(2) + snippets.replace_notification_channels( + pochan.project_name, alert_policy_id, [notification_channel_id]) + out, _ = capsys.readouterr() + assert "Updated {0}".format(pochan.alert_policy.name) in out + + +@pytest.mark.flaky(rerun_filter=delay_on_aborted, max_runs=5) +def test_backup_and_restore(capsys, pochan): + # These sleep calls are for mitigating the following error: + # "409 Too many concurrent edits to the project configuration. + # Please try again." + # Having multiple projects will void this `sleep()` call. 
+ # See also #3310 + time.sleep(2) + snippets.backup(pochan.project_name, 'backup.json') + out, _ = capsys.readouterr() + + time.sleep(2) + snippets.restore(pochan.project_name, 'backup.json') + out, _ = capsys.readouterr() + assert "Updated {0}".format(pochan.alert_policy.name) in out + assert "Updating channel {0}".format( + pochan.notification_channel.display_name) in out + + +@pytest.mark.flaky(rerun_filter=delay_on_aborted, max_runs=5) +def test_delete_channels(capsys, pochan): + notification_channel_id = pochan.notification_channel.name.split('/')[-1] + + # This sleep call is for mitigating the following error: + # "409 Too many concurrent edits to the project configuration. + # Please try again." + # Having multiple projects will void these `sleep()` calls. + # See also #3310 + time.sleep(2) + snippets.delete_notification_channels( + pochan.project_name, [notification_channel_id], force=True) + out, _ = capsys.readouterr() + assert "{0} deleted".format(notification_channel_id) in out + pochan.notification_channel.name = '' # So teardown is not tried diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/test_alert_policy.json b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/test_alert_policy.json new file mode 100644 index 000000000000..d728949f9bb3 --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/test_alert_policy.json @@ -0,0 +1,31 @@ +{ + "displayName": "test_alert_policy.json", + "combiner": "OR", + "conditions": [ + { + "conditionThreshold": { + "filter": "metric.label.state=\"blocked\" AND metric.type=\"agent.googleapis.com/processes/count_by_state\" AND resource.type=\"gce_instance\"", + "comparison": "COMPARISON_GT", + "thresholdValue": 100, + "duration": "900s", + "trigger": { + "percent": 0 + }, + "aggregations": [ + { + "alignmentPeriod": "60s", + "perSeriesAligner": "ALIGN_MEAN", + "crossSeriesReducer": "REDUCE_MEAN", + "groupByFields": [ + "project", + 
"resource.label.instance_id", + "resource.label.zone" + ] + } + ] + }, + "displayName": "test_alert_policy.json" + } + ], + "enabled": false +} \ No newline at end of file diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/test_notification_channel.json b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/test_notification_channel.json new file mode 100644 index 000000000000..6a0d53c00cdd --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/test_notification_channel.json @@ -0,0 +1,15 @@ +{ + "type": "email", + "displayName": "Email joe.", + "description": "test_notification_channel.json", + "labels": { + "email_address": "joe@example.com" + }, + "userLabels": { + "office": "california_westcoast_usa", + "division": "fulfillment", + "role": "operations", + "level": "5" + }, + "enabled": true +} \ No newline at end of file diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/README.rst b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/README.rst new file mode 100644 index 000000000000..280f9c4e0a79 --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/README.rst @@ -0,0 +1,147 @@ +.. This file is automatically generated. Do not edit this file directly. + +Google Stackdriver Monitoring API Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/cloud-client/README.rst + + +This directory contains samples for Google Stackdriver Monitoring API. 
Stackdriver Monitoring collects metrics, events, and metadata from Google Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, application instrumentation, and a variety of common application components including Cassandra, Nginx, Apache Web Server, Elasticsearch + and many others. Stackdriver ingests that data and generates insights + via dashboards, charts, and alerts. + + + + +.. _Google Stackdriver Monitoring API: https://cloud.google.com/monitoring/docs/ + +To run the sample, you need to enable the API at: https://console.cloud.google.com/apis/library/monitoring.googleapis.com + +To run the sample, you need to have `Monitoring Admin` role. + + +Please visit [the Cloud Console UI of this API](https://console.cloud.google.com/monitoring) and create a new Workspace with the same name of your Cloud project. + + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started + +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. 
code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Quickstart ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/cloud-client/quickstart.py,monitoring/api/v3/cloud-client/README.rst + + + + +To run this sample: + +.. code-block:: bash + + $ python quickstart.py + + +Snippets ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/cloud-client/snippets.py,monitoring/api/v3/cloud-client/README.rst + + + + +To run this sample: + +.. code-block:: bash + + $ python snippets.py + + usage: snippets.py [-h] + {create-metric-descriptor,list-metric-descriptors,get-metric-descriptor,delete-metric-descriptor,list-resources,get-resource,write-time-series,list-time-series,list-time-series-header,list-time-series-reduce,list-time-series-aggregate} + ... + + Demonstrates Monitoring API operations. 
+ + positional arguments: + {create-metric-descriptor,list-metric-descriptors,get-metric-descriptor,delete-metric-descriptor,list-resources,get-resource,write-time-series,list-time-series,list-time-series-header,list-time-series-reduce,list-time-series-aggregate} + create-metric-descriptor + list-metric-descriptors + get-metric-descriptor + delete-metric-descriptor + list-resources + get-resource + write-time-series + list-time-series + list-time-series-header + list-time-series-reduce + list-time-series-aggregate + + optional arguments: + -h, --help show this help message and exit + + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/README.rst.in b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/README.rst.in new file mode 100644 index 000000000000..0ab6b2258b78 --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/README.rst.in @@ -0,0 +1,35 @@ +# This file is used to generate README.rst + +product: + name: Google Stackdriver Monitoring API + short_name: Stackdriver Monitoring API + url: https://cloud.google.com/monitoring/docs/ + description: > + Stackdriver Monitoring collects metrics, events, and metadata from Google + Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, + application instrumentation, and a variety of common application + components including Cassandra, Nginx, Apache Web Server, Elasticsearch + and many others. Stackdriver ingests that data and generates insights + via dashboards, charts, and alerts. + +required_api_url: https://console.cloud.google.com/apis/library/monitoring.googleapis.com +required_role: Monitoring Admin +other_required_steps: > + Please visit [the Cloud Console UI of this + API](https://console.cloud.google.com/monitoring) and create a new + Workspace with the same name of your Cloud project. 
+ +setup: +- auth +- install_deps + +samples: +- name: Quickstart + file: quickstart.py +- name: Snippets + file: snippets.py + show_help: true + +cloud_client_library: true + +folder: monitoring/api/v3/cloud-client diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/noxfile.py b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/noxfile.py new file mode 100644 index 000000000000..ba55d7ce53ca --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/noxfile.py @@ -0,0 +1,224 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + 'ignored_versions': ["2.7"], + + # An envvar key for determining the project id to use. 
def _determine_local_import_names(start_dir):
    """Determines all import names that should be considered "local".

    This is used when running the linter to ensure that import order is
    properly checked.
    """
    file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)]
    # A name is "local" if it is a .py file, or a directory (package) other
    # than __pycache__. NOTE: the exclusion must be a tuple; the original
    # `basename not in ("__pycache__")` tested substring membership in the
    # STRING "__pycache__", wrongly excluding directories such as "cache".
    return [
        basename
        for basename, extension in file_ext_pairs
        if extension == ".py"
        or os.path.isdir(os.path.join(start_dir, basename))
        and basename not in ("__pycache__",)
    ]
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session): + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) + + +# +# Readmegen +# + + +def _get_repo_root(): + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. + p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session, path): + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/noxfile_config.py b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/noxfile_config.py new file mode 100644 index 000000000000..664c58309d70 --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/noxfile_config.py @@ -0,0 +1,42 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be inported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + 'ignored_versions': ["2.7"], + + # Declare optional test sessions you want to opt-in. Currently we + # have the following optional test sessions: + # 'cloud_run' # Test session for Cloud Run application. + 'opt_in_sessions': [], + + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + # 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + 'envs': {}, +} diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/quickstart.py b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/quickstart.py new file mode 100644 index 000000000000..0527acae545e --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/quickstart.py @@ -0,0 +1,43 @@ +# Copyright 2017 Google Inc. 
def run_quickstart():
    """Writes one data point of a custom metric to Cloud Monitoring.

    Demonstrates the minimal Monitoring v3 write path: build a TimeSeries
    for a ``gce_instance`` resource and send it with create_time_series().
    Requires application-default credentials and a real project ID.
    """
    # [START monitoring_quickstart]
    from google.cloud import monitoring_v3

    import time

    client = monitoring_v3.MetricServiceClient()
    project = 'my-project'  # TODO: Update to your project ID.
    project_name = client.project_path(project)

    series = monitoring_v3.types.TimeSeries()
    series.metric.type = 'custom.googleapis.com/my_metric'
    series.resource.type = 'gce_instance'
    # Resource labels attribute the point to a specific (placeholder) VM.
    series.resource.labels['instance_id'] = '1234567890123456789'
    series.resource.labels['zone'] = 'us-central1-f'
    point = series.points.add()
    point.value.double_value = 3.14
    now = time.time()
    # Split the float timestamp into whole seconds plus the remaining
    # fraction expressed in nanoseconds, as the protobuf Timestamp expects.
    point.interval.end_time.seconds = int(now)
    point.interval.end_time.nanos = int(
        (now - point.interval.end_time.seconds) * 10**9)
    client.create_time_series(project_name, [series])
    print('Successfully wrote time series.')
    # [END monitoring_quickstart]


if __name__ == '__main__':
    run_quickstart()
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import backoff +import mock +import pytest + +import quickstart + + +PROJECT = os.environ['GOOGLE_CLOUD_PROJECT'] + + +@pytest.fixture +def mock_project_path(): + """Mock out project and replace with project from environment.""" + project_patch = mock.patch( + 'google.cloud.monitoring_v3.MetricServiceClient.' + 'project_path') + + with project_patch as project_mock: + project_mock.return_value = 'projects/{}'.format(PROJECT) + yield project_mock + + +def test_quickstart(capsys, mock_project_path): + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): + quickstart.run_quickstart() + out, _ = capsys.readouterr() + assert 'wrote' in out + + eventually_consistent_test() diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/requirements-test.txt b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/requirements-test.txt new file mode 100644 index 000000000000..b04e65e37c9c --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/requirements-test.txt @@ -0,0 +1,3 @@ +backoff==1.10.0 +pytest==6.0.1 +mock==4.0.2 diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/requirements.txt b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/requirements.txt new file mode 100644 index 000000000000..10c88fc2f9ae --- /dev/null +++ 
b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/requirements.txt @@ -0,0 +1 @@ +google-cloud-monitoring==1.1.0 diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/snippets.py b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/snippets.py new file mode 100644 index 000000000000..64b3853fd7cc --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/snippets.py @@ -0,0 +1,310 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import os +import pprint +import time +import uuid + +from google.cloud import monitoring_v3 + + +PROJECT_ID = os.environ['GOOGLE_CLOUD_PROJECT'] + + +def create_metric_descriptor(project_id): + # [START monitoring_create_metric] + client = monitoring_v3.MetricServiceClient() + project_name = client.project_path(project_id) + descriptor = monitoring_v3.types.MetricDescriptor() + descriptor.type = 'custom.googleapis.com/my_metric' + str(uuid.uuid4()) + descriptor.metric_kind = ( + monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE) + descriptor.value_type = ( + monitoring_v3.enums.MetricDescriptor.ValueType.DOUBLE) + descriptor.description = 'This is a simple example of a custom metric.' 
+ descriptor = client.create_metric_descriptor(project_name, descriptor) + print('Created {}.'.format(descriptor.name)) + # [END monitoring_create_metric] + + +def delete_metric_descriptor(descriptor_name): + # [START monitoring_delete_metric] + client = monitoring_v3.MetricServiceClient() + client.delete_metric_descriptor(descriptor_name) + print('Deleted metric descriptor {}.'.format(descriptor_name)) + # [END monitoring_delete_metric] + + +def write_time_series(project_id): + # [START monitoring_write_timeseries] + client = monitoring_v3.MetricServiceClient() + project_name = client.project_path(project_id) + + series = monitoring_v3.types.TimeSeries() + series.metric.type = 'custom.googleapis.com/my_metric' + str(uuid.uuid4()) + series.resource.type = 'gce_instance' + series.resource.labels['instance_id'] = '1234567890123456789' + series.resource.labels['zone'] = 'us-central1-f' + point = series.points.add() + point.value.double_value = 3.14 + now = time.time() + point.interval.end_time.seconds = int(now) + point.interval.end_time.nanos = int( + (now - point.interval.end_time.seconds) * 10**9) + client.create_time_series(project_name, [series]) + # [END monitoring_write_timeseries] + + +def list_time_series(project_id): + # [START monitoring_read_timeseries_simple] + client = monitoring_v3.MetricServiceClient() + project_name = client.project_path(project_id) + interval = monitoring_v3.types.TimeInterval() + now = time.time() + interval.end_time.seconds = int(now) + interval.end_time.nanos = int( + (now - interval.end_time.seconds) * 10**9) + interval.start_time.seconds = int(now - 1200) + interval.start_time.nanos = interval.end_time.nanos + results = client.list_time_series( + project_name, + 'metric.type = "compute.googleapis.com/instance/cpu/utilization"', + interval, + monitoring_v3.enums.ListTimeSeriesRequest.TimeSeriesView.FULL) + for result in results: + print(result) + # [END monitoring_read_timeseries_simple] + + +def 
list_time_series_header(project_id): + # [START monitoring_read_timeseries_fields] + client = monitoring_v3.MetricServiceClient() + project_name = client.project_path(project_id) + interval = monitoring_v3.types.TimeInterval() + now = time.time() + interval.end_time.seconds = int(now) + interval.end_time.nanos = int( + (now - interval.end_time.seconds) * 10**9) + interval.start_time.seconds = int(now - 1200) + interval.start_time.nanos = interval.end_time.nanos + results = client.list_time_series( + project_name, + 'metric.type = "compute.googleapis.com/instance/cpu/utilization"', + interval, + monitoring_v3.enums.ListTimeSeriesRequest.TimeSeriesView.HEADERS) + for result in results: + print(result) + # [END monitoring_read_timeseries_fields] + + +def list_time_series_aggregate(project_id): + # [START monitoring_read_timeseries_align] + client = monitoring_v3.MetricServiceClient() + project_name = client.project_path(project_id) + interval = monitoring_v3.types.TimeInterval() + now = time.time() + interval.end_time.seconds = int(now) + interval.end_time.nanos = int( + (now - interval.end_time.seconds) * 10**9) + interval.start_time.seconds = int(now - 3600) + interval.start_time.nanos = interval.end_time.nanos + aggregation = monitoring_v3.types.Aggregation() + aggregation.alignment_period.seconds = 1200 # 20 minutes + aggregation.per_series_aligner = ( + monitoring_v3.enums.Aggregation.Aligner.ALIGN_MEAN) + + results = client.list_time_series( + project_name, + 'metric.type = "compute.googleapis.com/instance/cpu/utilization"', + interval, + monitoring_v3.enums.ListTimeSeriesRequest.TimeSeriesView.FULL, + aggregation) + for result in results: + print(result) + # [END monitoring_read_timeseries_align] + + +def list_time_series_reduce(project_id): + # [START monitoring_read_timeseries_reduce] + client = monitoring_v3.MetricServiceClient() + project_name = client.project_path(project_id) + interval = monitoring_v3.types.TimeInterval() + now = time.time() + 
interval.end_time.seconds = int(now) + interval.end_time.nanos = int( + (now - interval.end_time.seconds) * 10**9) + interval.start_time.seconds = int(now - 3600) + interval.start_time.nanos = interval.end_time.nanos + aggregation = monitoring_v3.types.Aggregation() + aggregation.alignment_period.seconds = 1200 # 20 minutes + aggregation.per_series_aligner = ( + monitoring_v3.enums.Aggregation.Aligner.ALIGN_MEAN) + aggregation.cross_series_reducer = ( + monitoring_v3.enums.Aggregation.Reducer.REDUCE_MEAN) + aggregation.group_by_fields.append('resource.zone') + + results = client.list_time_series( + project_name, + 'metric.type = "compute.googleapis.com/instance/cpu/utilization"', + interval, + monitoring_v3.enums.ListTimeSeriesRequest.TimeSeriesView.FULL, + aggregation) + for result in results: + print(result) + # [END monitoring_read_timeseries_reduce] + + +def list_metric_descriptors(project_id): + # [START monitoring_list_descriptors] + client = monitoring_v3.MetricServiceClient() + project_name = client.project_path(project_id) + for descriptor in client.list_metric_descriptors(project_name): + print(descriptor.type) + # [END monitoring_list_descriptors] + + +def list_monitored_resources(project_id): + # [START monitoring_list_resources] + client = monitoring_v3.MetricServiceClient() + project_name = client.project_path(project_id) + resource_descriptors = ( + client.list_monitored_resource_descriptors(project_name)) + for descriptor in resource_descriptors: + print(descriptor.type) + # [END monitoring_list_resources] + + +def get_monitored_resource_descriptor(project_id, resource_type_name): + # [START monitoring_get_resource] + client = monitoring_v3.MetricServiceClient() + resource_path = client.monitored_resource_descriptor_path( + project_id, resource_type_name) + pprint.pprint(client.get_monitored_resource_descriptor(resource_path)) + # [END monitoring_get_resource] + + +def get_metric_descriptor(metric_name): + # [START monitoring_get_descriptor] + 
client = monitoring_v3.MetricServiceClient() + descriptor = client.get_metric_descriptor(metric_name) + pprint.pprint(descriptor) + # [END monitoring_get_descriptor] + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description='Demonstrates Monitoring API operations.') + + subparsers = parser.add_subparsers(dest='command') + + create_metric_descriptor_parser = subparsers.add_parser( + 'create-metric-descriptor', + help=create_metric_descriptor.__doc__ + ) + + list_metric_descriptor_parser = subparsers.add_parser( + 'list-metric-descriptors', + help=list_metric_descriptors.__doc__ + ) + + get_metric_descriptor_parser = subparsers.add_parser( + 'get-metric-descriptor', + help=get_metric_descriptor.__doc__ + ) + + get_metric_descriptor_parser.add_argument( + '--metric-type-name', + help='The metric type of the metric descriptor to see details about.', + required=True + ) + + delete_metric_descriptor_parser = subparsers.add_parser( + 'delete-metric-descriptor', + help=list_metric_descriptors.__doc__ + ) + + delete_metric_descriptor_parser.add_argument( + '--metric-descriptor-name', + help='Metric descriptor to delete', + required=True + ) + + list_resources_parser = subparsers.add_parser( + 'list-resources', + help=list_monitored_resources.__doc__ + ) + + get_resource_parser = subparsers.add_parser( + 'get-resource', + help=get_monitored_resource_descriptor.__doc__ + ) + + get_resource_parser.add_argument( + '--resource-type-name', + help='Monitored resource to view more information about.', + required=True + ) + + write_time_series_parser = subparsers.add_parser( + 'write-time-series', + help=write_time_series.__doc__ + ) + + list_time_series_parser = subparsers.add_parser( + 'list-time-series', + help=list_time_series.__doc__ + ) + + list_time_series_header_parser = subparsers.add_parser( + 'list-time-series-header', + help=list_time_series_header.__doc__ + ) + + read_time_series_reduce = subparsers.add_parser( + 'list-time-series-reduce', + 
help=list_time_series_reduce.__doc__ + ) + + read_time_series_aggregate = subparsers.add_parser( + 'list-time-series-aggregate', + help=list_time_series_aggregate.__doc__ + ) + + args = parser.parse_args() + + if args.command == 'create-metric-descriptor': + create_metric_descriptor(PROJECT_ID) + if args.command == 'list-metric-descriptors': + list_metric_descriptors(PROJECT_ID) + if args.command == 'get-metric-descriptor': + get_metric_descriptor(args.metric_type_name) + if args.command == 'delete-metric-descriptor': + delete_metric_descriptor(args.metric_descriptor_name) + if args.command == 'list-resources': + list_monitored_resources(PROJECT_ID) + if args.command == 'get-resource': + get_monitored_resource_descriptor( + PROJECT_ID, args.resource_type_name) + if args.command == 'write-time-series': + write_time_series(PROJECT_ID) + if args.command == 'list-time-series': + list_time_series(PROJECT_ID) + if args.command == 'list-time-series-header': + list_time_series_header(PROJECT_ID) + if args.command == 'list-time-series-reduce': + list_time_series_reduce(PROJECT_ID) + if args.command == 'list-time-series-aggregate': + list_time_series_aggregate(PROJECT_ID) diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/snippets_test.py b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/snippets_test.py new file mode 100644 index 000000000000..5aabbda83922 --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/snippets_test.py @@ -0,0 +1,117 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import re + +import backoff +from google.api_core.exceptions import InternalServerError +from google.api_core.exceptions import NotFound +import pytest + +import snippets + + +PROJECT_ID = os.environ['GOOGLE_CLOUD_PROJECT'] + + +@pytest.fixture(scope="function") +def custom_metric_descriptor(capsys): + snippets.create_metric_descriptor(PROJECT_ID) + out, _ = capsys.readouterr() + match = re.search(r'Created (.*)\.', out) + metric_name = match.group(1) + yield metric_name + + # teardown + try: + snippets.delete_metric_descriptor(metric_name) + except NotFound: + print("Metric descriptor already deleted") + + +@pytest.fixture(scope="module") +def write_time_series(): + + @backoff.on_exception(backoff.expo, InternalServerError, max_time=120) + def write(): + snippets.write_time_series(PROJECT_ID) + + write() + yield + + +def test_get_delete_metric_descriptor(capsys, custom_metric_descriptor): + try: + @backoff.on_exception( + backoff.expo, (AssertionError, NotFound), max_time=60) + def eventually_consistent_test(): + snippets.get_metric_descriptor(custom_metric_descriptor) + out, _ = capsys.readouterr() + assert 'DOUBLE' in out + + eventually_consistent_test() + finally: + snippets.delete_metric_descriptor(custom_metric_descriptor) + out, _ = capsys.readouterr() + assert 'Deleted metric' in out + + +def test_list_metric_descriptors(capsys): + snippets.list_metric_descriptors(PROJECT_ID) + out, _ = capsys.readouterr() + assert 'logging.googleapis.com/byte_count' in out + + +def test_list_resources(capsys): + 
snippets.list_monitored_resources(PROJECT_ID) + out, _ = capsys.readouterr() + assert 'pubsub_topic' in out + + +def test_get_resources(capsys): + snippets.get_monitored_resource_descriptor( + PROJECT_ID, 'pubsub_topic') + out, _ = capsys.readouterr() + assert 'A topic in Google Cloud Pub/Sub' in out + + +def test_list_time_series(capsys, write_time_series): + snippets.list_time_series(PROJECT_ID) + out, _ = capsys.readouterr() + assert 'gce_instance' in out + + +def test_list_time_series_header(capsys, write_time_series): + snippets.list_time_series_header(PROJECT_ID) + out, _ = capsys.readouterr() + assert 'gce_instance' in out + + +def test_list_time_series_aggregate(capsys, write_time_series): + snippets.list_time_series_aggregate(PROJECT_ID) + out, _ = capsys.readouterr() + assert 'points' in out + assert 'interval' in out + assert 'start_time' in out + assert 'end_time' in out + + +def test_list_time_series_reduce(capsys, write_time_series): + snippets.list_time_series_reduce(PROJECT_ID) + out, _ = capsys.readouterr() + assert 'points' in out + assert 'interval' in out + assert 'start_time' in out + assert 'end_time' in out diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/README.rst b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/README.rst new file mode 100644 index 000000000000..30046bdef9d2 --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/README.rst @@ -0,0 +1,115 @@ +.. This file is automatically generated. Do not edit this file directly. + +Google Stackdriver Uptime Checks API Python Samples +=============================================================================== + +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/uptime-check-client/README.rst + + +This directory contains samples for Google Stackdriver Uptime Checks API. Stackdriver Monitoring collects metrics, events, and metadata from Google Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, application instrumentation, and a variety of common application components including Cassandra, Nginx, Apache Web Server, Elasticsearch and many others. Stackdriver's Uptime Checks API allows you to create, delete, and list your project's Uptime Checks. + + + + +.. _Google Stackdriver Uptime Checks API: https://cloud.google.com/monitoring/uptime-checks/management + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started + +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. 
code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Snippets ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/uptime-check-client/snippets.py,monitoring/api/v3/uptime-check-client/README.rst + + + + +To run this sample: + +.. code-block:: bash + + $ python snippets.py + + usage: snippets.py [-h] + {list-uptime-check-configs,list-uptime-check-ips,create-uptime-check,get-uptime-check-config,delete-uptime-check-config} + ... + + Demonstrates Uptime Check API operations. + + positional arguments: + {list-uptime-check-configs,list-uptime-check-ips,create-uptime-check,get-uptime-check-config,delete-uptime-check-config} + list-uptime-check-configs + list-uptime-check-ips + create-uptime-check + get-uptime-check-config + delete-uptime-check-config + + optional arguments: + -h, --help show this help message and exit + + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/README.rst.in b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/README.rst.in new file mode 100644 index 000000000000..1174962e48d1 --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/README.rst.in @@ -0,0 +1,26 @@ +# This file is used to generate README.rst + +product: + name: Google Stackdriver Uptime Checks API + short_name: Stackdriver Uptime Checks API + url: https://cloud.google.com/monitoring/uptime-checks/management + description: > + Stackdriver Monitoring collects metrics, events, and metadata from Google + Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, + application instrumentation, and a variety of common application + components including Cassandra, Nginx, Apache Web Server, Elasticsearch + and many others. Stackdriver's Uptime Checks API allows you to create, + delete, and list your project's Uptime Checks. + +setup: +- auth +- install_deps + +samples: +- name: Snippets + file: snippets.py + show_help: true + +cloud_client_library: true + +folder: monitoring/api/v3/uptime-check-client \ No newline at end of file diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/noxfile.py b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/noxfile.py new file mode 100644 index 000000000000..ba55d7ce53ca --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/noxfile.py @@ -0,0 +1,224 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + 'ignored_versions': ["2.7"], + + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + 'envs': {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. 
+ sys.path.append('.') + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars(): + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG['gcloud_project_env'] + # This should error out if not set. + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG['envs']) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to tested samples. +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir): + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. 
+# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session): + session.install("flake8", "flake8-import-order") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + "." + ] + session.run("flake8", *args) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests(session, post_install=None): + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session): + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) + + +# +# Readmegen +# + + +def _get_repo_root(): + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. + p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session, path): + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/requirements-test.txt b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/requirements-test.txt new file mode 100644 index 000000000000..d0029c6de49e --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/requirements-test.txt @@ -0,0 +1,2 @@ +backoff==1.10.0 +pytest==6.0.1 diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/requirements.txt b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/requirements.txt new file mode 100644 
index 000000000000..bc7a2fe57c8b --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/requirements.txt @@ -0,0 +1,2 @@ +google-cloud-monitoring==1.1.0 +tabulate==0.8.7 diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/snippets.py b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/snippets.py new file mode 100644 index 000000000000..dcde3b58650d --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/snippets.py @@ -0,0 +1,257 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import print_function + +import argparse +import os +import pprint + +from google.cloud import monitoring_v3 +import tabulate + + +# [START monitoring_uptime_check_create] +def create_uptime_check_config_get(project_name, host_name=None, display_name=None): + config = monitoring_v3.types.uptime_pb2.UptimeCheckConfig() + config.display_name = display_name or "New GET uptime check" + config.monitored_resource.type = "uptime_url" + config.monitored_resource.labels.update({"host": host_name or "example.com"}) + config.http_check.request_method = ( + monitoring_v3.enums.UptimeCheckConfig.HttpCheck.RequestMethod.GET + ) + config.http_check.path = "/" + config.http_check.port = 80 + config.timeout.seconds = 10 + config.period.seconds = 300 + + client = monitoring_v3.UptimeCheckServiceClient() + new_config = client.create_uptime_check_config(project_name, config) + pprint.pprint(new_config) + return new_config + + +def create_uptime_check_config_post(project_name, host_name=None, display_name=None): + config = monitoring_v3.types.uptime_pb2.UptimeCheckConfig() + config.display_name = display_name or "New POST uptime check" + config.monitored_resource.type = "uptime_url" + config.monitored_resource.labels.update({"host": host_name or "example.com"}) + config.http_check.request_method = ( + monitoring_v3.enums.UptimeCheckConfig.HttpCheck.RequestMethod.POST + ) + config.http_check.content_type = ( + monitoring_v3.enums.UptimeCheckConfig.HttpCheck.ContentType.URL_ENCODED + ) + config.http_check.body = "foo=bar".encode("utf-8") + config.http_check.path = "/" + config.http_check.port = 80 + config.timeout.seconds = 10 + config.period.seconds = 300 + + client = monitoring_v3.UptimeCheckServiceClient() + new_config = client.create_uptime_check_config(project_name, config) + pprint.pprint(new_config) + return new_config + + +# [END monitoring_uptime_check_create] + +# [START monitoring_uptime_check_update] +def update_uptime_check_config( + config_name, 
new_display_name=None, new_http_check_path=None +): + client = monitoring_v3.UptimeCheckServiceClient() + config = client.get_uptime_check_config(config_name) + field_mask = monitoring_v3.types.FieldMask() + if new_display_name: + field_mask.paths.append("display_name") + config.display_name = new_display_name + if new_http_check_path: + field_mask.paths.append("http_check.path") + config.http_check.path = new_http_check_path + client.update_uptime_check_config(config, field_mask) + + +# [END monitoring_uptime_check_update] + + +# [START monitoring_uptime_check_list_configs] +def list_uptime_check_configs(project_name): + client = monitoring_v3.UptimeCheckServiceClient() + configs = client.list_uptime_check_configs(project_name) + + for config in configs: + pprint.pprint(config) + + +# [END monitoring_uptime_check_list_configs] + + +# [START monitoring_uptime_check_list_ips] +def list_uptime_check_ips(): + client = monitoring_v3.UptimeCheckServiceClient() + ips = client.list_uptime_check_ips() + print( + tabulate.tabulate( + [(ip.region, ip.location, ip.ip_address) for ip in ips], + ("region", "location", "ip_address"), + ) + ) + + +# [END monitoring_uptime_check_list_ips] + + +# [START monitoring_uptime_check_get] +def get_uptime_check_config(config_name): + client = monitoring_v3.UptimeCheckServiceClient() + config = client.get_uptime_check_config(config_name) + pprint.pprint(config) + + +# [END monitoring_uptime_check_get] + + +# [START monitoring_uptime_check_delete] +# `config_name` is the `name` field of an UptimeCheckConfig. +# See https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.uptimeCheckConfigs#UptimeCheckConfig. 
+def delete_uptime_check_config(config_name): + client = monitoring_v3.UptimeCheckServiceClient() + client.delete_uptime_check_config(config_name) + print("Deleted ", config_name) + + +# [END monitoring_uptime_check_delete] + + +class MissingProjectIdError(Exception): + pass + + +def project_id(): + """Retrieves the project id from the environment variable. + + Raises: + MissingProjectIdError -- When not set. + + Returns: + str -- the project id + """ + project_id = os.environ["GOOGLE_CLOUD_PROJECT"] + + if not project_id: + raise MissingProjectIdError( + "Set the environment variable " + + "GOOGLE_CLOUD_PROJECT to your Google Cloud Project Id." + ) + return project_id + + +def project_name(): + return "projects/" + project_id() + + +if __name__ == "__main__": + + parser = argparse.ArgumentParser( + description="Demonstrates Uptime Check API operations." + ) + + subparsers = parser.add_subparsers(dest="command") + + list_uptime_check_configs_parser = subparsers.add_parser( + "list-uptime-check-configs", help=list_uptime_check_configs.__doc__ + ) + + list_uptime_check_ips_parser = subparsers.add_parser( + "list-uptime-check-ips", help=list_uptime_check_ips.__doc__ + ) + + create_uptime_check_config_get_parser = subparsers.add_parser( + "create-uptime-check-get", help=create_uptime_check_config_get.__doc__ + ) + create_uptime_check_config_get_parser.add_argument( + "-d", "--display_name", required=False, + ) + create_uptime_check_config_get_parser.add_argument( + "-o", "--host_name", required=False, + ) + + create_uptime_check_config_post_parser = subparsers.add_parser( + "create-uptime-check-post", help=create_uptime_check_config_post.__doc__ + ) + create_uptime_check_config_post_parser.add_argument( + "-d", "--display_name", required=False, + ) + create_uptime_check_config_post_parser.add_argument( + "-o", "--host_name", required=False, + ) + + get_uptime_check_config_parser = subparsers.add_parser( + "get-uptime-check-config", help=get_uptime_check_config.__doc__ + )
+ get_uptime_check_config_parser.add_argument( + "-m", "--name", required=True, + ) + + delete_uptime_check_config_parser = subparsers.add_parser( + "delete-uptime-check-config", help=delete_uptime_check_config.__doc__ + ) + delete_uptime_check_config_parser.add_argument( + "-m", "--name", required=True, + ) + + update_uptime_check_config_parser = subparsers.add_parser( + "update-uptime-check-config", help=update_uptime_check_config.__doc__ + ) + update_uptime_check_config_parser.add_argument( + "-m", "--name", required=True, + ) + update_uptime_check_config_parser.add_argument( + "-d", "--display_name", required=False, + ) + update_uptime_check_config_parser.add_argument( + "-p", "--uptime_check_path", required=False, + ) + + args = parser.parse_args() + + if args.command == "list-uptime-check-configs": + list_uptime_check_configs(project_name()) + + elif args.command == "list-uptime-check-ips": + list_uptime_check_ips() + + elif args.command == "create-uptime-check-get": + create_uptime_check_config_get( + project_name(), args.host_name, args.display_name + ) + elif args.command == "create-uptime-check-post": + create_uptime_check_config_post( + project_name(), args.host_name, args.display_name + ) + + elif args.command == "get-uptime-check-config": + get_uptime_check_config(args.name) + + elif args.command == "delete-uptime-check-config": + delete_uptime_check_config(args.name) + + elif args.command == "update-uptime-check-config": + if not args.display_name and not args.uptime_check_path: + print("Nothing to update. 
Pass --display_name or " "--uptime_check_path.") + else: + update_uptime_check_config( + args.name, args.display_name, args.uptime_check_path + ) diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/snippets_test.py b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/snippets_test.py new file mode 100644 index 000000000000..81d2b247372c --- /dev/null +++ b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/snippets_test.py @@ -0,0 +1,105 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import random +import string + +import backoff +from google.api_core.exceptions import DeadlineExceeded +import pytest + +import snippets + + +def random_name(length): + return "".join([random.choice(string.ascii_lowercase) for i in range(length)]) + + +class UptimeFixture: + """A test fixture that creates uptime check config. + """ + + def __init__(self): + self.project_id = snippets.project_id() + self.project_name = snippets.project_name() + + def __enter__(self): + # Create an uptime check config (GET request). + self.config_get = snippets.create_uptime_check_config_get( + self.project_name, display_name=random_name(10) + ) + # Create an uptime check config (POST request). 
+ self.config_post = snippets.create_uptime_check_config_post( + self.project_name, display_name=random_name(10) + ) + return self + + def __exit__(self, type, value, traceback): + # Delete the config. + snippets.delete_uptime_check_config(self.config_get.name) + snippets.delete_uptime_check_config(self.config_post.name) + + +@pytest.fixture(scope="session") +def uptime(): + with UptimeFixture() as uptime: + yield uptime + + +def test_create_and_delete(capsys): + # create and delete happen in uptime fixture. + with UptimeFixture(): + pass + + +def test_update_uptime_config(capsys): + # create and delete happen in uptime fixture. + new_display_name = random_name(10) + new_uptime_check_path = "/" + random_name(10) + with UptimeFixture() as fixture: + # We sometimes see the permission error saying the resource + # may not exist. Weirdly DeadlineExceeded instance is raised + # in this case. + @backoff.on_exception(backoff.expo, DeadlineExceeded, max_time=120) + def call_sample(): + snippets.update_uptime_check_config( + fixture.config_get.name, new_display_name, new_uptime_check_path) + + call_sample() + + out, _ = capsys.readouterr() + snippets.get_uptime_check_config(fixture.config_get.name) + out, _ = capsys.readouterr() + assert new_display_name in out + assert new_uptime_check_path in out + + +def test_get_uptime_check_config(capsys, uptime): + snippets.get_uptime_check_config(uptime.config_get.name) + out, _ = capsys.readouterr() + assert uptime.config_get.display_name in out + + +def test_list_uptime_check_configs(capsys, uptime): + snippets.list_uptime_check_configs(uptime.project_name) + out, _ = capsys.readouterr() + assert uptime.config_get.display_name in out + + +def test_list_uptime_check_ips(capsys): + snippets.list_uptime_check_ips() + out, _ = capsys.readouterr() + assert "Singapore" in out diff --git a/packages/google-cloud-monitoring/scripts/decrypt-secrets.sh b/packages/google-cloud-monitoring/scripts/decrypt-secrets.sh index 
ff599eb2af25..21f6d2a26d90 100755 --- a/packages/google-cloud-monitoring/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-monitoring/scripts/decrypt-secrets.sh @@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" ) # Work from the project root. cd $ROOT +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + # Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ > testing/test-env.sh gcloud secrets versions access latest \ --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ > testing/service-account.json gcloud secrets versions access latest \ --secret="python-docs-samples-client-secrets" \ - > testing/client-secrets.json \ No newline at end of file + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-monitoring/synth.metadata b/packages/google-cloud-monitoring/synth.metadata index 06138a5bc09a..da924e9719ac 100644 --- a/packages/google-cloud-monitoring/synth.metadata +++ b/packages/google-cloud-monitoring/synth.metadata @@ -4,22 +4,21 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-monitoring.git", - "sha": "e45865745551f720d46e4f15e3da4e293c99f130" + "sha": "85a1420dbb23011681072e425a80e1130dac69e8" } }, { "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "c8e291e6a4d60771219205b653715d5aeec3e96b", - "internalRef": "311222505" + "name": "synthtool", + "remote": 
"https://github.com/googleapis/synthtool.git", + "sha": "da29da32b3a988457b49ae290112b74f14b713cc" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "cd522c3b4dde821766d95c80ae5aeb43d7a41170" + "sha": "da29da32b3a988457b49ae290112b74f14b713cc" } } ], diff --git a/packages/google-cloud-monitoring/synth.py b/packages/google-cloud-monitoring/synth.py index 756269b8dcfa..3367fa8b9664 100644 --- a/packages/google-cloud-monitoring/synth.py +++ b/packages/google-cloud-monitoring/synth.py @@ -16,6 +16,7 @@ import synthtool as s import synthtool.gcp as gcp +from synthtool.languages import python import logging AUTOSYNTH_MULTIPLE_COMMITS = True @@ -112,9 +113,15 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(cov_level=92) +templated_files = common.py_library(cov_level=92, samples=True) s.move(templated_files) +# ---------------------------------------------------------------------------- +# Samples templates +# ---------------------------------------------------------------------------- +python.py_samples(skip_readmes=True) + + # TODO(busunkim): Use latest sphinx after microgenerator transition s.replace("noxfile.py", """['"]sphinx['"]""", '"sphinx<3.0.0"')