Skip to content

Commit

Permalink
Handle deprecation of APIs and attributes (#1170)
Browse files Browse the repository at this point in the history
* Add deprecation warnings

Add some mechanism for deprecated APIs and attributes.

* Deprecate attributes

* Add a note in APIs too

* Minor doc formatting.

* pre-commit fixes

Co-authored-by: ci.datadog-api-spec <packages@datadoghq.com>
  • Loading branch information
therve and ci.datadog-api-spec committed Sep 29, 2022
1 parent 903b123 commit 3431088
Show file tree
Hide file tree
Showing 42 changed files with 49 additions and 83 deletions.
2 changes: 1 addition & 1 deletion .generator/src/generator/formatter.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ def header(self, text, level, raw=None):


def docstring(text):
    """Convert a Markdown description into an RST docstring fragment.

    Escapes embedded newlines before handing the text to m2r2, strips the
    leading/trailing newline the converter adds, un-escapes stray backslash
    artifacts, and collapses triple blank lines to double so the generated
    docstrings stay compact.

    :param text: Markdown source text (typically an OpenAPI description).
    :return: The converted RST string.
    """
    # Single return: the diff residue duplicated the pre-change body above
    # the post-change one, leaving an unreachable statement; only the
    # updated version (with the "\n\n\n" -> "\n\n" collapse) is kept.
    return (
        m2r2.convert(text.replace("\\n", "\\\\n"), renderer=CustomRenderer())[1:-1]
        .replace("\\ ", " ")
        .replace("\\`", "\\\\`")
        .replace("\n\n\n", "\n\n")
    )


def _merge_imports(a, b):
Expand Down
6 changes: 5 additions & 1 deletion .generator/src/generator/templates/api.j2
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ from __future__ import annotations

import collections
from typing import Any, Dict, List, Union
import warnings

from {{ package }}.api_client import ApiClient, Endpoint as _Endpoint
from {{ package }}.model_utils import (
Expand Down Expand Up @@ -141,7 +142,7 @@ class {{ classname }}:
{%- for path, method, operation in operations|sort(attribute="2.operationId") %}
{%- set returnType = operation|return_type %}
def {{ operation.operationId|safe_snake_case }}(self, {% for name, parameter in operation|parameters if parameter.required %}{{name|attribute_name}}: {{ get_type_for_parameter(parameter, typing=True) }}, {% endfor %}{% for name, parameter in operation|parameters if not parameter.required %}{% if loop.first %}*, {% endif %}{{name|attribute_name}}: Union[{{ get_type_for_parameter(parameter, typing=True) }}, UnsetType]=unset, {% endfor %}) -> {% if returnType %}{{ returnType.replace("[", "List[") }}{% else %}None{% endif %}:
"""{{ operation.summary|indent(8) }}.
"""{{ operation.summary|indent(8) }}.{% if operation.deprecated %} **Deprecated**.{% endif %}
{% if operation.description %}
{{ operation.description|docstring|indent(8) }}
{% endif %}
Expand All @@ -168,6 +169,9 @@ class {{ classname }}:
kwargs["{{ name|attribute_name }}"] = {{ name|attribute_name }}
{%- endif %}
{% endfor %}
{%- if operation.deprecated %}
warnings.warn("{{ operation.operationId|safe_snake_case }} is deprecated", DeprecationWarning, stacklevel=2)
{%- endif %}
return self._{{ operation.operationId|safe_snake_case }}_endpoint.call_with_http_info(**kwargs)
{%- if operation["x-pagination"] %}
{%- set pagination = operation["x-pagination"] %}
Expand Down
2 changes: 1 addition & 1 deletion .generator/src/generator/templates/model_generic.j2
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ class {{ name }}(ModelNormal):
{{ model.description|docstring|indent(8) }}
{%- for attr, definition in model.get("properties", {}).items() %}
{# keep new line #}
:param {{ attr|attribute_name }}: {{ definition.description|docstring|indent(12) }}
:param {{ attr|attribute_name }}: {{ definition.description|docstring|indent(12) }}{% if definition.deprecated %} **Deprecated**.{% endif %}
:type {{ attr|attribute_name }}: {{ get_type_for_attribute(model, attr, current_name=name) }}{% if definition.nullable %}, none_type{% endif %}{% if attr not in model.get("required", []) %}, optional{% endif %}
{%- endfor %}
"""
Expand Down
2 changes: 0 additions & 2 deletions src/datadog_api_client/v1/api/aws_logs_integration_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,6 @@ def check_aws_logs_lambda_async(
is the same as for Enable an AWS service log collection. Subsequent requests will always repeat the above, so this
endpoint can be polled intermittently instead of blocking.
* Returns a status of 'created' when it's checking if the Lambda exists in the account.
* Returns a status of 'waiting' while checking.
* Returns a status of 'checked and ok' if the Lambda exists.
Expand All @@ -201,7 +200,6 @@ def check_aws_logs_services_async(
Done async, so can be repeatedly polled in a non-blocking fashion until
the async request completes.
* Returns a status of ``created`` when it's checking if the permissions exists
in the AWS account.
* Returns a status of ``waiting`` while checking.
Expand Down
1 change: 0 additions & 1 deletion src/datadog_api_client/v1/api/events_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,6 @@ def list_events(
**Notes** :
*
If the event you’re querying contains markdown formatting of any kind,
you may see characters such as ``%`` , ``\\`` , ``n`` in your output.
Expand Down
7 changes: 3 additions & 4 deletions src/datadog_api_client/v1/api/logs_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from __future__ import annotations

from typing import Any, Dict, Union
import warnings

from datadog_api_client.api_client import ApiClient, Endpoint as _Endpoint
from datadog_api_client.model_utils import (
Expand Down Expand Up @@ -158,18 +159,16 @@ def submit_log(
content_encoding: Union[ContentEncoding, UnsetType] = unset,
ddtags: Union[str, UnsetType] = unset,
) -> dict:
"""Send logs.
"""Send logs. **Deprecated**.
Send your logs to your Datadog platform over HTTP. Limits per HTTP request are:
* Maximum content size per payload (uncompressed): 5MB
* Maximum size for a single log: 1MB
* Maximum array size if sending multiple logs in an array: 1000 entries
Any log exceeding 1MB is accepted and truncated by Datadog:
* For a single log request, the API truncates the log at 1MB and returns a 2xx.
* For a multi-logs request, the API processes all logs, truncates only logs larger than 1MB, and returns a 2xx.
Expand All @@ -178,7 +177,6 @@ def submit_log(
The status codes answered by the HTTP API are:
* 200: OK
* 400: Bad request (likely an issue in the payload formatting)
* 403: Permission issue (likely using an invalid API Key)
Expand All @@ -202,4 +200,5 @@ def submit_log(

kwargs["body"] = body

warnings.warn("submit_log is deprecated", DeprecationWarning, stacklevel=2)
return self._submit_log_endpoint.call_with_http_info(**kwargs)
1 change: 0 additions & 1 deletion src/datadog_api_client/v1/api/logs_pipelines_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@ class LogsPipelinesApi:
Pipelines and processors operate on incoming logs, parsing
and transforming them into structured attributes for easier querying.
*
See the `pipelines configuration page <https://app.datadoghq.com/logs/pipelines>`_
for a list of the pipelines and processors currently configured in web UI.
Expand Down
2 changes: 0 additions & 2 deletions src/datadog_api_client/v1/api/metrics_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ class MetricsApi:
"""
The metrics endpoint allows you to:
* Post metrics data so it can be graphed on Datadog’s dashboards
* Query metrics from any time period
* Modify tag configurations for metrics
Expand Down Expand Up @@ -374,7 +373,6 @@ def submit_metrics(
If you’re submitting metrics directly to the Datadog API without using DogStatsD, expect:
* 64 bits for the timestamp
* 64 bits for the value
* 40 bytes for the metric names
Expand Down
16 changes: 0 additions & 16 deletions src/datadog_api_client/v1/api/monitors_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -382,7 +382,6 @@ def create_monitor(
The type of monitor chosen from:
* anomaly: ``query alert``
* APM: ``query alert`` or ``trace-analytics alert``
* composite: ``composite``
Expand Down Expand Up @@ -410,7 +409,6 @@ def create_monitor(
Example: ``time_aggr(time_window):space_aggr:metric{tags} [by {key}] operator #``
* ``time_aggr`` : avg, sum, max, min, change, or pct_change
* ``time_window`` : ``last_#m`` (with ``#`` between 1 and 10080 depending on the monitor type) or ``last_#h`` (with ``#`` between 1 and 168 depending on the monitor type) or ``last_1d`` , or ``last_1w``
* ``space_aggr`` : avg, sum, min, or max
Expand All @@ -422,7 +420,6 @@ def create_monitor(
If you are using the ``_change_`` or ``_pct_change_`` time aggregator, instead use ``change_aggr(time_aggr(time_window),
timeshift):space_aggr:metric{tags} [by {key}] operator #`` with:
* ``change_aggr`` change, pct_change
* ``time_aggr`` avg, sum, max, min `Learn more <https://docs.datadoghq.com/monitors/create/types/#define-the-conditions>`_
* ``time_window`` last_#m (between 1 and 2880 depending on the monitor type), last_#h (between 1 and 48 depending on the monitor type), or last_#d (1 or 2)
Expand All @@ -435,7 +432,6 @@ def create_monitor(
Example: ``"check".over(tags).last(count).by(group).count_by_status()``
* ``check`` name of the check, for example ``datadog.agent.up``
* ``tags`` one or more quoted tags (comma-separated), or "*". for example: ``.over("env:prod", "role:db")`` ; ``over`` cannot be blank.
* ``count`` must be at greater than or equal to your max threshold (defined in the ``options`` ). It is limited to 100.
Expand All @@ -447,7 +443,6 @@ def create_monitor(
Example: ``events('sources:nagios status:error,warning priority:normal tags: "string query"').rollup("count").last("1h")"``
* ``event`` , the event query string:
* ``string_query`` free text query to match against event title and text.
* ``sources`` event sources (comma-separated).
Expand All @@ -465,7 +460,6 @@ def create_monitor(
Example: ``events(query).rollup(rollup_method[, measure]).last(time_window) operator #``
* ``query`` The search query - following the `Log search syntax <https://docs.datadoghq.com/logs/search_syntax/>`_.
* ``rollup_method`` The stats roll-up method - supports ``count`` , ``avg`` and ``cardinality``.
* ``measure`` For ``avg`` and cardinality ``rollup_method`` - specify the measure or the facet name you want to use.
Expand All @@ -477,7 +471,6 @@ def create_monitor(
Example: ``processes(search).over(tags).rollup('count').last(timeframe) operator #``
* ``search`` free text search string for querying processes.
Matching processes match results on the `Live Processes <https://docs.datadoghq.com/infrastructure/process/?tab=linuxwindows>`_ page.
* ``tags`` one or more tags (comma-separated)
Expand All @@ -489,7 +482,6 @@ def create_monitor(
Example: ``logs(query).index(index_name).rollup(rollup_method[, measure]).last(time_window) operator #``
* ``query`` The search query - following the `Log search syntax <https://docs.datadoghq.com/logs/search_syntax/>`_.
* ``index_name`` For multi-index organizations, the log index in which the request is performed.
* ``rollup_method`` The stats roll-up method - supports ``count`` , ``avg`` and ``cardinality``.
Expand All @@ -502,7 +494,6 @@ def create_monitor(
Example: ``12345 && 67890`` , where ``12345`` and ``67890`` are the IDs of non-composite monitors
* ``name`` [ *required* , *default* = **dynamic, based on query** ]: The name of the alert.
* ``message`` [ *required* , *default* = **dynamic, based on query** ]: A message to include with notifications for this monitor.
Email notifications can be sent to specific users by using the same '@username' notation as events.
Expand All @@ -514,7 +505,6 @@ def create_monitor(
Example: ``error_budget("slo_id").over("time_window") operator #``
* ``slo_id`` : The alphanumeric SLO ID of the SLO you are configuring the alert for.
* `time_window`: The time window of the SLO target you wish to alert on. Valid options: ``7d`` , ``30d`` , ``90d``.
* ``operator`` : ``>=`` or ``>``
Expand All @@ -523,7 +513,6 @@ def create_monitor(
Example: ``audits(query).rollup(rollup_method[, measure]).last(time_window) operator #``
* ``query`` The search query - following the `Log search syntax <https://docs.datadoghq.com/logs/search_syntax/>`_.
* ``rollup_method`` The stats roll-up method - supports ``count`` , ``avg`` and ``cardinality``.
* ``measure`` For ``avg`` and cardinality ``rollup_method`` - specify the measure or the facet name you want to use.
Expand All @@ -537,7 +526,6 @@ def create_monitor(
Example: ``ci-pipelines(query).rollup(rollup_method[, measure]).last(time_window) operator #``
* ``query`` The search query - following the `Log search syntax <https://docs.datadoghq.com/logs/search_syntax/>`_.
* ``rollup_method`` The stats roll-up method - supports ``count`` , ``avg`` , and ``cardinality``.
* ``measure`` For ``avg`` and cardinality ``rollup_method`` - specify the measure or the facet name you want to use.
Expand All @@ -551,7 +539,6 @@ def create_monitor(
Example: ``ci-tests(query).rollup(rollup_method[, measure]).last(time_window) operator #``
* ``query`` The search query - following the `Log search syntax <https://docs.datadoghq.com/logs/search_syntax/>`_.
* ``rollup_method`` The stats roll-up method - supports ``count`` , ``avg`` , and ``cardinality``.
* ``measure`` For ``avg`` and cardinality ``rollup_method`` - specify the measure or the facet name you want to use.
Expand All @@ -566,7 +553,6 @@ def create_monitor(
Example(RUM): ``error-tracking-rum(query).rollup(rollup_method[, measure]).last(time_window) operator #``
Example(APM Traces): ``error-tracking-traces(query).rollup(rollup_method[, measure]).last(time_window) operator #``
* ``query`` The search query - following the `Log search syntax <https://docs.datadoghq.com/logs/search_syntax/>`_.
* ``rollup_method`` The stats roll-up method - supports ``count`` , ``avg`` , and ``cardinality``.
* ``measure`` For ``avg`` and cardinality ``rollup_method`` - specify the measure or the facet name you want to use.
Expand Down Expand Up @@ -719,7 +705,6 @@ def search_monitor_groups(
:type per_page: int, optional
:param sort: String for sort order, composed of field and sort order separate by a comma, for example ``name,asc``. Supported sort directions: ``asc`` , ``desc``. Supported fields:
* ``name``
* ``status``
* ``tags``
Expand Down Expand Up @@ -765,7 +750,6 @@ def search_monitors(
:type per_page: int, optional
:param sort: String for sort order, composed of field and sort order separate by a comma, for example ``name,asc``. Supported sort directions: ``asc`` , ``desc``. Supported fields:
* ``name``
* ``status``
* ``tags``
Expand Down
1 change: 0 additions & 1 deletion src/datadog_api_client/v1/api/organizations_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,7 +275,6 @@ def upload_idp_for_org(
There are a couple of options for updating the Identity Provider (IdP)
metadata from your SAML IdP.
*
**Multipart Form-Data** : Post the IdP metadata file using a form post.
Expand Down
2 changes: 0 additions & 2 deletions src/datadog_api_client/v1/api/service_checks_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ class ServiceChecksApi:
are limited for checks with a Critical or Warning status, they are dropped for checks with
an OK status.
* `Read more about Service Check monitors. <https://docs.datadoghq.com/monitors/create/types/host/?tab=checkalert>`_
* `Read more about Process Check monitors. <https://docs.datadoghq.com/monitors/create/types/process_check/?tab=checkalert>`_
* `Read more about Network Check monitors. <https://docs.datadoghq.com/monitors/create/types/network/?tab=checkalert>`_
Expand Down Expand Up @@ -63,7 +62,6 @@ def submit_service_check(
**Notes** :
* A valid API key is required.
* Service checks can be submitted up to 10 minutes in the past.
Expand Down
19 changes: 13 additions & 6 deletions src/datadog_api_client/v1/api/usage_metering_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from __future__ import annotations

from typing import Any, Dict, List, Union
import warnings

from datadog_api_client.api_client import ApiClient, Endpoint as _Endpoint
from datadog_api_client.model_utils import (
Expand Down Expand Up @@ -1346,7 +1347,7 @@ def get_daily_custom_reports(
sort_dir: Union[UsageSortDirection, UnsetType] = unset,
sort: Union[UsageSort, UnsetType] = unset,
) -> UsageCustomReportsResponse:
"""Get the list of available daily custom reports.
"""Get the list of available daily custom reports. **Deprecated**.
Get daily custom reports.
**Note:** This endpoint will be fully deprecated on December 1, 2022.
Expand Down Expand Up @@ -1375,6 +1376,7 @@ def get_daily_custom_reports(
if sort is not unset:
kwargs["sort"] = sort

warnings.warn("get_daily_custom_reports is deprecated", DeprecationWarning, stacklevel=2)
return self._get_daily_custom_reports_endpoint.call_with_http_info(**kwargs)

def get_hourly_usage_attribution(
Expand Down Expand Up @@ -1501,7 +1503,7 @@ def get_monthly_custom_reports(
sort_dir: Union[UsageSortDirection, UnsetType] = unset,
sort: Union[UsageSort, UnsetType] = unset,
) -> UsageCustomReportsResponse:
"""Get the list of available monthly custom reports.
"""Get the list of available monthly custom reports. **Deprecated**.
Get monthly custom reports.
**Note:** This endpoint will be fully deprecated on December 1, 2022.
Expand Down Expand Up @@ -1530,6 +1532,7 @@ def get_monthly_custom_reports(
if sort is not unset:
kwargs["sort"] = sort

warnings.warn("get_monthly_custom_reports is deprecated", DeprecationWarning, stacklevel=2)
return self._get_monthly_custom_reports_endpoint.call_with_http_info(**kwargs)

def get_monthly_usage_attribution(
Expand Down Expand Up @@ -1612,7 +1615,7 @@ def get_specified_daily_custom_reports(
self,
report_id: str,
) -> UsageSpecifiedCustomReportsResponse:
"""Get specified daily custom reports.
"""Get specified daily custom reports. **Deprecated**.
Get specified daily custom reports.
**Note:** This endpoint will be fully deprecated on December 1, 2022.
Expand All @@ -1625,13 +1628,14 @@ def get_specified_daily_custom_reports(
kwargs: Dict[str, Any] = {}
kwargs["report_id"] = report_id

warnings.warn("get_specified_daily_custom_reports is deprecated", DeprecationWarning, stacklevel=2)
return self._get_specified_daily_custom_reports_endpoint.call_with_http_info(**kwargs)

def get_specified_monthly_custom_reports(
self,
report_id: str,
) -> UsageSpecifiedCustomReportsResponse:
"""Get specified monthly custom reports.
"""Get specified monthly custom reports. **Deprecated**.
Get specified monthly custom reports.
**Note:** This endpoint will be fully deprecated on December 1, 2022.
Expand All @@ -1644,6 +1648,7 @@ def get_specified_monthly_custom_reports(
kwargs: Dict[str, Any] = {}
kwargs["report_id"] = report_id

warnings.warn("get_specified_monthly_custom_reports is deprecated", DeprecationWarning, stacklevel=2)
return self._get_specified_monthly_custom_reports_endpoint.call_with_http_info(**kwargs)

def get_usage_analyzed_logs(
Expand Down Expand Up @@ -1684,7 +1689,7 @@ def get_usage_attribution(
offset: Union[int, UnsetType] = unset,
limit: Union[int, UnsetType] = unset,
) -> UsageAttributionResponse:
"""Get usage attribution.
"""Get usage attribution. **Deprecated**.
Get usage attribution.
**Note:** This endpoint will be fully deprecated on December 1, 2022.
Expand Down Expand Up @@ -1732,6 +1737,7 @@ def get_usage_attribution(
if limit is not unset:
kwargs["limit"] = limit

warnings.warn("get_usage_attribution is deprecated", DeprecationWarning, stacklevel=2)
return self._get_usage_attribution_endpoint.call_with_http_info(**kwargs)

def get_usage_audit_logs(
Expand Down Expand Up @@ -2338,7 +2344,7 @@ def get_usage_synthetics(
*,
end_hr: Union[datetime, UnsetType] = unset,
) -> UsageSyntheticsResponse:
"""Get hourly usage for synthetics checks.
"""Get hourly usage for synthetics checks. **Deprecated**.
Get hourly usage for `synthetics checks <https://docs.datadoghq.com/synthetics/>`_.
**Note:** hourly usage data for all products is now available in the `Get hourly usage by product family API <https://docs.datadoghq.com/api/latest/usage-metering/#get-hourly-usage-by-product-family>`_. Refer to `Migrating from the V1 Hourly Usage APIs to V2 <https://docs.datadoghq.com/account_management/guide/hourly-usage-migration/>`_ for the associated migration guide.
Expand All @@ -2355,6 +2361,7 @@ def get_usage_synthetics(
if end_hr is not unset:
kwargs["end_hr"] = end_hr

warnings.warn("get_usage_synthetics is deprecated", DeprecationWarning, stacklevel=2)
return self._get_usage_synthetics_endpoint.call_with_http_info(**kwargs)

def get_usage_synthetics_api(
Expand Down
Loading

0 comments on commit 3431088

Please sign in to comment.