diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/__init__.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/__init__.py
index 5f92f88a4629..320e9412988f 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/__init__.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/__init__.py
@@ -5,17 +5,25 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._patch import LogsIngestionClient
+from typing import TYPE_CHECKING
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
-from ._patch import LogsUploadError
+from ._client import LogsIngestionClient # type: ignore
+
+try:
+ from ._patch import __all__ as _patch_all
+ from ._patch import *
+except ImportError:
+ _patch_all = []
from ._patch import patch_sdk as _patch_sdk
__all__ = [
- "LogsUploadError",
"LogsIngestionClient",
]
-
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
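For context, the regenerated `__init__.py` now pulls customizations in through the `_patch` module convention: anything `_patch.py` lists in its `__all__` is re-exported and merged into the package `__all__`, and `patch_sdk()` runs at import time. A minimal sketch of a `_patch.py` compatible with this loader — illustrative only; the real module in this package is larger, though it does define `LogsUploadError`, which the old `__init__.py` imported directly:

```python
# _patch.py -- illustrative sketch only, not the SDK's actual implementation.
from typing import List

__all__: List[str] = ["LogsUploadError"]  # merged into the package __all__ above


class LogsUploadError(Exception):
    """Hypothetical stand-in for the SDK's upload-failure aggregate error."""


def patch_sdk() -> None:
    """Hook invoked by the generated __init__.py after imports; often a no-op."""
```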
diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_client.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_client.py
index 2029d7cbd28e..e56331b0cc36 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_client.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_client.py
@@ -8,6 +8,7 @@
from copy import deepcopy
from typing import Any, TYPE_CHECKING
+from typing_extensions import Self
from azure.core import PipelineClient
from azure.core.pipeline import policies
@@ -18,11 +19,10 @@
from ._serialization import Deserializer, Serializer
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class LogsIngestionClient(LogsIngestionClientOperationsMixin): # pylint: disable=client-accepts-api-version-keyword
+class LogsIngestionClient(LogsIngestionClientOperationsMixin):
"""Azure Monitor Data Collection Python Client.
:param endpoint: The Data Collection Endpoint for the Data Collection Rule, for example
@@ -90,7 +90,7 @@ def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs:
def close(self) -> None:
self._client.close()
- def __enter__(self) -> "LogsIngestionClient":
+ def __enter__(self) -> Self:
self._client.__enter__()
return self
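Swapping the string annotation for `typing_extensions.Self` is more than cosmetic: `Self` binds to the runtime class, so a hypothetical subclass of the client keeps its own type inside a `with` block instead of being narrowed back to the base client. A minimal sketch (class names are illustrative):

```python
from typing_extensions import Self


class Client:
    def __enter__(self) -> Self:
        return self

    def __exit__(self, *exc_info: object) -> None: ...


class InstrumentedClient(Client):
    def flush_metrics(self) -> None: ...  # illustrative subclass-only method


with InstrumentedClient() as client:
    client.flush_metrics()  # type checkers see InstrumentedClient, not Client
```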
diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_configuration.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_configuration.py
index fe4cf6f2f115..02a9843af91a 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_configuration.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_configuration.py
@@ -11,13 +11,12 @@
from azure.core.pipeline import policies
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
VERSION = "unknown"
-class LogsIngestionClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+class LogsIngestionClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for LogsIngestionClient.
Note that all parameters used to create this instance are saved as instance
diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/__init__.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/__init__.py
index dd3d0ef4d235..4bf65f393a39 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/__init__.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/__init__.py
@@ -5,14 +5,21 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._patch import LogsIngestionClientOperationsMixin
+from typing import TYPE_CHECKING
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+from ._operations import LogsIngestionClientOperationsMixin # type: ignore
+
+from ._patch import __all__ as _patch_all
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"LogsIngestionClientOperationsMixin",
]
-
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_operations.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_operations.py
index ea0d1703bb52..2eff843c47dc 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_operations.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, List, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -29,7 +28,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -71,8 +70,9 @@ def build_logs_ingestion_upload_request(
class LogsIngestionClientOperationsMixin(LogsIngestionClientMixinABC):
+
@overload
- def _upload( # pylint: disable=inconsistent-return-statements
+ def _upload(
self,
rule_id: str,
stream: str,
@@ -82,9 +82,8 @@ def _upload( # pylint: disable=inconsistent-return-statements
content_type: str = "application/json",
**kwargs: Any
) -> None: ...
-
@overload
- def _upload( # pylint: disable=inconsistent-return-statements
+ def _upload(
self,
rule_id: str,
stream: str,
@@ -122,7 +121,7 @@ def _upload( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -168,8 +167,6 @@ def _upload( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [204]:
- if _stream:
- response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
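The `_upload` overloads above follow the usual content-negotiation pattern: one typed signature per accepted body shape (`List[JSON]` or `IO[bytes]`), backed by a single implementation that branches on the runtime type. A stripped-down sketch of the same pattern, with illustrative names rather than the SDK's API:

```python
import json
from io import IOBase
from typing import IO, Any, List, MutableMapping, Union, overload

JSON = MutableMapping[str, Any]


@overload
def upload(body: List[JSON], *, content_type: str = "application/json") -> None: ...
@overload
def upload(body: IO[bytes], *, content_type: str = "application/json") -> None: ...


def upload(body: Union[List[JSON], IO[bytes]], *, content_type: str = "application/json") -> None:
    if isinstance(body, (IOBase, bytes)):
        payload = body  # pass the stream through untouched
    else:
        payload = json.dumps(body).encode("utf-8")  # serialize the records
    ...  # hand payload to the request pipeline
```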
diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_serialization.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_serialization.py
index 2f781d740827..e2ad51869908 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_serialization.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,6 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
@@ -52,7 +52,6 @@
MutableMapping,
Type,
List,
- Mapping,
)
try:
@@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -144,6 +145,8 @@ def _json_attemp(data):
# context otherwise.
_LOGGER.critical("Wasn't XML not JSON, failing")
raise DeserializationError("XML is invalid") from err
+ elif content_type.startswith("text/"):
+ return data_as_str
raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
@classmethod
@@ -153,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
Headers will tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -182,15 +190,30 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+ """UTF offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(hours=1)
@@ -204,7 +227,7 @@ class _FixedOffset(datetime.tzinfo): # type: ignore
:param datetime.timedelta offset: offset in timedelta format
"""
- def __init__(self, offset):
+ def __init__(self, offset) -> None:
self.__offset = offset
def utcoffset(self, dt):
@@ -233,24 +256,26 @@ def __getinitargs__(self):
_FLATTEN = re.compile(r"(?<!\\)\.")
def __init__(self, **kwargs: Any) -> None:
self.additional_properties: Optional[Dict[str, Any]] = {}
- for k in kwargs:
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -298,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
setattr(self, k, kwargs[k])
def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
def __str__(self) -> str:
@@ -324,7 +366,11 @@ def is_xml_model(cls) -> bool:
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
xml_map = cls._xml_map # type: ignore
except AttributeError:
@@ -344,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
def as_dict(
self,
@@ -378,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
@@ -393,7 +444,7 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@@ -406,6 +457,7 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@@ -424,9 +476,11 @@ def from_dict(
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
deserializer.key_extractors = ( # type: ignore
@@ -446,21 +500,25 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
- Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
if not isinstance(response, ET.Element):
rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+ subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
else:
subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
if subtype_value:
@@ -499,11 +557,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer(object): # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -538,7 +598,7 @@ class Serializer(object):
"multiple": lambda x, y: x % y != 0,
}
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.serialize_type = {
"iso-8601": Serializer.serialize_iso,
"rfc-1123": Serializer.serialize_rfc,
@@ -558,13 +618,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None):
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -590,12 +653,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -631,7 +696,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if isinstance(new_attr, list):
serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
@@ -662,17 +728,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
raise SerializationError(msg) from err
- else:
- return serialized
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
@@ -701,7 +767,7 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
raise SerializationError("Unable to build a model: " + str(err)) from err
@@ -710,9 +776,11 @@ def body(self, data, data_type, **kwargs):
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -726,21 +794,20 @@ def url(self, name, data, data_type, **kwargs):
output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :keyword bool skip_quote: Whether to skip quote the serialized result.
- Defaults to False.
:rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
@@ -757,19 +824,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -778,21 +846,20 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
@@ -803,7 +870,7 @@ def serialize_data(self, data, data_type, **kwargs):
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -819,11 +886,10 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
raise SerializationError(msg.format(data, data_type)) from err
- else:
- return self._serialize(data, **kwargs)
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -839,23 +905,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+ :param obj data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -869,8 +938,7 @@ def serialize_unicode(cls, data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -880,15 +948,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
:keyword bool do_quote: Whether to quote the serialized result of each iterable element.
Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -943,9 +1009,8 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
@@ -969,7 +1034,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -977,6 +1042,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -1001,7 +1067,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1032,56 +1098,61 @@ def serialize_enum(attr, enum_obj=None):
try:
enum_obj(result) # type: ignore
return result
- except ValueError:
+ except ValueError as exc:
for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+ :param decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1089,11 +1160,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1103,30 +1175,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1139,12 +1213,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1170,13 +1245,14 @@ def serialize_iso(attr, **kwargs):
raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+ :return: serialized unix
"""
if isinstance(attr, int):
return attr
@@ -1184,11 +1260,11 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
@@ -1209,7 +1285,9 @@ def rest_key_extractor(attr, attr_desc, data):
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1230,17 +1308,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1277,7 +1367,7 @@ def _extract_name_from_internal_type(internal_type):
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1329,22 +1419,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1361,9 +1450,9 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.deserialize_type = {
"iso-8601": Deserializer.deserialize_iso,
"rfc-1123": Deserializer.deserialize_rfc,
@@ -1401,11 +1490,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1414,12 +1504,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1438,13 +1529,13 @@ def _deserialize(self, target_obj, data):
if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
- if data is None:
+ if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map # type: ignore
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1474,9 +1565,8 @@ def _deserialize(self, target_obj, data):
except (AttributeError, TypeError, KeyError) as err:
msg = "Unable to deserialize to object: " + class_name # type: ignore
raise DeserializationError(msg) from err
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1503,6 +1593,8 @@ def _classify_target(self, target, data):
:param str target: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
@@ -1514,7 +1606,7 @@ def _classify_target(self, target, data):
return target, target
try:
- target = target._classify(data, self.dependencies) # type: ignore
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
return target, target.__class__.__name__ # type: ignore
@@ -1529,10 +1621,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
:param str target_obj: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -1550,10 +1644,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1577,24 +1673,35 @@ def _unpack_content(raw_data, content_type=None):
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("readonly")
+ ]
+ const = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("constant")
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
setattr(response_obj, attr, attrs.get(attr))
if additional_properties:
- response_obj.additional_properties = additional_properties
+ response_obj.additional_properties = additional_properties # type: ignore
return response_obj
except TypeError as err:
msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err))
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1603,15 +1710,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1625,7 +1733,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1645,14 +1757,14 @@ def deserialize_data(self, data, data_type):
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
raise DeserializationError(msg) from err
- else:
- return self._deserialize(obj_type, data)
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1669,6 +1781,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1679,11 +1792,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1718,11 +1832,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1730,6 +1843,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1741,24 +1855,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, str):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
@@ -1766,6 +1879,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1779,8 +1893,7 @@ def deserialize_unicode(data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1792,6 +1905,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1802,9 +1916,9 @@ def deserialize_enum(data, enum_obj):
# Workaround. We might consider remove it in the future.
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
@@ -1820,6 +1934,7 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1832,6 +1947,7 @@ def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1847,8 +1963,9 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal
:raises: DeserializationError if string format invalid.
+ :rtype: decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
@@ -1863,6 +1980,7 @@ def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int
:rtype: long or int
:raises: ValueError if string format invalid.
"""
@@ -1875,6 +1993,7 @@ def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1885,14 +2004,14 @@ def deserialize_duration(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
raise DeserializationError(msg) from err
- else:
- return duration
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
@@ -1908,6 +2027,7 @@ def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
@@ -1922,6 +2042,7 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1937,14 +2058,14 @@ def deserialize_rfc(attr):
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1974,8 +2095,7 @@ def deserialize_iso(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1983,6 +2103,7 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be serialized.
+ :return: Deserialized datetime
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
@@ -1994,5 +2115,4 @@ def deserialize_unix(attr):
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
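Most of the `_serialization.py` churn above is mechanical pylint cleanup, but the recurring `raise ... from err` edits change observable behavior: the original exception is preserved on `__cause__` instead of being discarded. A small self-contained demonstration of the difference:

```python
import json


class DeserializationError(Exception):
    pass


def parse(data: str) -> dict:
    try:
        return json.loads(data)
    except ValueError as err:
        # "from err" chains the ValueError instead of dropping it
        raise DeserializationError("JSON is invalid: {}".format(err)) from err


try:
    parse("{not valid json")
except DeserializationError as exc:
    assert isinstance(exc.__cause__, ValueError)  # original failure retained
```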
diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_vendor.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_vendor.py
index d24f8d5ac6cb..1e41de430fa6 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_vendor.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_vendor.py
@@ -11,7 +11,6 @@
from ._configuration import LogsIngestionClientConfiguration
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core import PipelineClient
from ._serialization import Deserializer, Serializer
diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/__init__.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/__init__.py
index 93e4aa376af9..320e9412988f 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/__init__.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/__init__.py
@@ -5,15 +5,25 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._patch import LogsIngestionClient
+from typing import TYPE_CHECKING
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+from ._client import LogsIngestionClient # type: ignore
+
+try:
+ from ._patch import __all__ as _patch_all
+ from ._patch import *
+except ImportError:
+ _patch_all = []
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"LogsIngestionClient",
]
-
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_client.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_client.py
index f7160c828671..790c3684c952 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_client.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_client.py
@@ -8,6 +8,7 @@
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
+from typing_extensions import Self
from azure.core import AsyncPipelineClient
from azure.core.pipeline import policies
@@ -18,11 +19,10 @@
from ._operations import LogsIngestionClientOperationsMixin
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class LogsIngestionClient(LogsIngestionClientOperationsMixin): # pylint: disable=client-accepts-api-version-keyword
+class LogsIngestionClient(LogsIngestionClientOperationsMixin):
"""Azure Monitor Data Collection Python Client.
:param endpoint: The Data Collection Endpoint for the Data Collection Rule, for example
@@ -92,7 +92,7 @@ def send_request(
async def close(self) -> None:
await self._client.close()
- async def __aenter__(self) -> "LogsIngestionClient":
+ async def __aenter__(self) -> Self:
await self._client.__aenter__()
return self
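The async client mirrors the sync `Self` change for `__aenter__`, so `async with` preserves the concrete client type. Typical usage looks roughly like this — the endpoint value is a placeholder, and `DefaultAzureCredential` is just one credential type satisfying `AsyncTokenCredential`:

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.monitor.ingestion.aio import LogsIngestionClient


async def main() -> None:
    credential = DefaultAzureCredential()
    async with LogsIngestionClient(
        endpoint="https://example-dce.monitor.azure.com",  # placeholder endpoint
        credential=credential,
    ) as client:
        ...  # client is typed as LogsIngestionClient inside the block
    await credential.close()


asyncio.run(main())
```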
diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_configuration.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_configuration.py
index b3023503d074..4f5fa166407f 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_configuration.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_configuration.py
@@ -11,13 +11,12 @@
from azure.core.pipeline import policies
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
VERSION = "unknown"
-class LogsIngestionClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+class LogsIngestionClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for LogsIngestionClient.
Note that all parameters used to create this instance are saved as instance
diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/__init__.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/__init__.py
index dd3d0ef4d235..4bf65f393a39 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/__init__.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/__init__.py
@@ -5,14 +5,21 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._patch import LogsIngestionClientOperationsMixin
+from typing import TYPE_CHECKING
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+from ._operations import LogsIngestionClientOperationsMixin # type: ignore
+
+from ._patch import __all__ as _patch_all
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"LogsIngestionClientOperationsMixin",
]
-
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_operations.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_operations.py
index 5f66be399dc0..523b9c603bca 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_operations.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, List, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -29,15 +28,16 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class LogsIngestionClientOperationsMixin(LogsIngestionClientMixinABC):
+
@overload
- async def _upload( # pylint: disable=inconsistent-return-statements
+ async def _upload(
self,
rule_id: str,
stream: str,
@@ -47,9 +47,8 @@ async def _upload( # pylint: disable=inconsistent-return-statements
content_type: str = "application/json",
**kwargs: Any
) -> None: ...
-
@overload
- async def _upload( # pylint: disable=inconsistent-return-statements
+ async def _upload(
self,
rule_id: str,
stream: str,
@@ -61,7 +60,7 @@ async def _upload( # pylint: disable=inconsistent-return-statements
) -> None: ...
@distributed_trace_async
- async def _upload( # pylint: disable=inconsistent-return-statements
+ async def _upload(
self,
rule_id: str,
stream: str,
@@ -87,7 +86,7 @@ async def _upload( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -133,8 +132,6 @@ async def _upload( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [204]:
- if _stream:
- await response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
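
# The error_map annotation is loosened above, presumably because azure-core's
# map_error only performs a mapping lookup. A hedged sketch of the generated error
# path (map_error raises the mapped type for known status codes, else falls through):
from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceNotFoundError,
    map_error,
)

error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError}


def raise_for_status(response):  # `response` is an azure-core HttpResponse
    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response)
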
diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_vendor.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_vendor.py
index c6566b4a63ae..fcc0b096e1ea 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_vendor.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_vendor.py
@@ -11,7 +11,6 @@
from ._configuration import LogsIngestionClientConfiguration
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core import AsyncPipelineClient
from .._serialization import Deserializer, Serializer
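
# Dropping the pylint disables works because TYPE_CHECKING-guarded imports are
# evaluated only by type checkers, never at runtime. Minimal sketch of the pattern:
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from azure.core import AsyncPipelineClient  # annotation-only import


def describe(client: "AsyncPipelineClient") -> str:
    return type(client).__name__
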
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/__init__.py
index 74ab94e4977e..70b9d32ba891 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/__init__.py
@@ -5,12 +5,18 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._client import MonitorQueryClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._client import MonitorQueryClient # type: ignore
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -18,6 +24,6 @@
__all__ = [
"MonitorQueryClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_client.py
index 47be976b1bf8..dec20111e5d0 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_client.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_client.py
@@ -8,6 +8,7 @@
from copy import deepcopy
from typing import Any
+from typing_extensions import Self
from azure.core import PipelineClient
from azure.core.pipeline import policies
@@ -83,7 +84,7 @@ def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs:
def close(self) -> None:
self._client.close()
- def __enter__(self) -> "MonitorQueryClient":
+ def __enter__(self) -> Self:
self._client.__enter__()
return self
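
# Usage sketch for send_request as documented on these clients (the URL is
# illustrative; client construction is omitted and assumed done elsewhere):
from azure.core.rest import HttpRequest

request = HttpRequest("GET", "https://www.example.org/")
# response = client.send_request(request)  # `client` is a constructed MonitorQueryClient
# print(response.status_code)
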
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_configuration.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_configuration.py
index 9e8beeb05f4d..8962d9b6e7fe 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_configuration.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_configuration.py
@@ -13,7 +13,7 @@
VERSION = "unknown"
-class MonitorQueryClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+class MonitorQueryClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for MonitorQueryClient.
Note that all parameters used to create this instance are saved as instance
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_serialization.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_serialization.py
index 2f781d740827..e2ad51869908 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_serialization.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,6 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
@@ -52,7 +52,6 @@
MutableMapping,
Type,
List,
- Mapping,
)
try:
@@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -144,6 +145,8 @@ def _json_attemp(data):
# context otherwise.
_LOGGER.critical("Wasn't XML not JSON, failing")
raise DeserializationError("XML is invalid") from err
+ elif content_type.startswith("text/"):
+ return data_as_str
raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
@classmethod
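
# The new text/ branch above makes deserialize_from_text hand back plain-text
# bodies verbatim instead of failing. A condensed sketch of the dispatch order
# (names simplified; the real method also handles streams and XML parse errors):
import json
import xml.etree.ElementTree as ET


def unpack(data_as_str: str, content_type: str):
    if "json" in content_type:
        return json.loads(data_as_str)
    if "xml" in content_type:
        return ET.fromstring(data_as_str)
    if content_type.startswith("text/"):
        return data_as_str  # the behavior added in this diff
    raise ValueError("Cannot deserialize content-type: {}".format(content_type))
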
@@ -153,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
Headers will be tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -182,15 +190,30 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+ """UTF offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(hours=1)
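
# For reference, the tzinfo contract these three methods fill in (a stdlib-only
# sketch; note the textbook dst() for UTC is a zero delta):
import datetime


class SketchUTC(datetime.tzinfo):
    def utcoffset(self, dt):
        return datetime.timedelta(0)

    def tzname(self, dt):
        return "Z"

    def dst(self, dt):
        return datetime.timedelta(0)


aware = datetime.datetime(2024, 1, 1, tzinfo=SketchUTC())
assert aware.isoformat() == "2024-01-01T00:00:00+00:00"
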
@@ -204,7 +227,7 @@ class _FixedOffset(datetime.tzinfo): # type: ignore
:param datetime.timedelta offset: offset in timedelta format
"""
- def __init__(self, offset):
+ def __init__(self, offset) -> None:
self.__offset = offset
def utcoffset(self, dt):
@@ -233,24 +256,26 @@ def __getinitargs__(self):
_FLATTEN = re.compile(r"(?<!\\)\.")
def __init__(self, **kwargs: Any) -> None:
self.additional_properties: Optional[Dict[str, Any]] = {}
- for k in kwargs:
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -298,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
setattr(self, k, kwargs[k])
def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
def __str__(self) -> str:
@@ -324,7 +366,11 @@ def is_xml_model(cls) -> bool:
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
xml_map = cls._xml_map # type: ignore
except AttributeError:
@@ -344,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
def as_dict(
self,
@@ -378,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
@@ -393,7 +444,7 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@@ -406,6 +457,7 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
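
# Round-trip sketch for the serialize/deserialize pair on a toy model. The import
# path is the private module touched in this diff and may change; the Column model
# is hypothetical:
from azure.monitor.query._generated._serialization import Model


class Column(Model):
    _attribute_map = {"name": {"key": "name", "type": "str"}}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.name = kwargs.get("name")


col = Column.deserialize({"name": "TimeGenerated"})
assert col.name == "TimeGenerated"
assert col.serialize() == {"name": "TimeGenerated"}
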
@@ -424,9 +476,11 @@ def from_dict(
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
deserializer.key_extractors = ( # type: ignore
@@ -446,21 +500,25 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
- Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
if not isinstance(response, ET.Element):
rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+ subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
else:
subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
if subtype_value:
@@ -499,11 +557,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer(object): # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -538,7 +598,7 @@ class Serializer(object):
"multiple": lambda x, y: x % y != 0,
}
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.serialize_type = {
"iso-8601": Serializer.serialize_iso,
"rfc-1123": Serializer.serialize_rfc,
@@ -558,13 +618,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None):
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -590,12 +653,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -631,7 +696,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if isinstance(new_attr, list):
serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
@@ -662,17 +728,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
raise SerializationError(msg) from err
- else:
- return serialized
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
@@ -701,7 +767,7 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
raise SerializationError("Unable to build a model: " + str(err)) from err
@@ -710,9 +776,11 @@ def body(self, data, data_type, **kwargs):
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -726,21 +794,20 @@ def url(self, name, data, data_type, **kwargs):
output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :keyword bool skip_quote: Whether to skip quote the serialized result.
- Defaults to False.
:rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
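
# The `raise ... from exc` conversions above preserve the original
# SerializationError as __cause__ instead of discarding it. Stdlib demonstration:
try:
    try:
        raise ValueError("inner failure")
    except ValueError as exc:
        raise TypeError("outer failure") from exc
except TypeError as err:
    assert isinstance(err.__cause__, ValueError)
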
@@ -757,19 +824,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -778,21 +846,20 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
@@ -803,7 +870,7 @@ def serialize_data(self, data, data_type, **kwargs):
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -819,11 +886,10 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
raise SerializationError(msg.format(data, data_type)) from err
- else:
- return self._serialize(data, **kwargs)
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -839,23 +905,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+ :param obj data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -869,8 +938,7 @@ def serialize_unicode(cls, data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -880,15 +948,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
- :keyword bool do_quote: Whether to quote the serialized result of each iterable element.
- Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -943,9 +1009,8 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
@@ -969,7 +1034,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -977,6 +1042,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -1001,7 +1067,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1032,56 +1098,61 @@ def serialize_enum(attr, enum_obj=None):
try:
enum_obj(result) # type: ignore
return result
- except ValueError:
+ except ValueError as exc:
for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
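
# The strip/replace chain in serialize_base64 is a hand-rolled base64url encoding;
# a stdlib cross-check (equivalent output, assuming bytes input):
import base64

data = b"azure-monitor?"
manual = base64.b64encode(data).decode("ascii").strip("=").replace("+", "-").replace("/", "_")
assert manual == base64.urlsafe_b64encode(data).decode("ascii").rstrip("=")
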
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+ :param decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1089,11 +1160,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1103,30 +1175,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1139,12 +1213,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1170,13 +1245,14 @@ def serialize_iso(attr, **kwargs):
raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+ :return: serialized unix
"""
if isinstance(attr, int):
return attr
@@ -1184,11 +1260,11 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
@@ -1209,7 +1285,9 @@ def rest_key_extractor(attr, attr_desc, data):
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1230,17 +1308,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1277,7 +1367,7 @@ def _extract_name_from_internal_type(internal_type):
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1329,22 +1419,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1361,9 +1450,9 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.deserialize_type = {
"iso-8601": Deserializer.deserialize_iso,
"rfc-1123": Deserializer.deserialize_rfc,
@@ -1401,11 +1490,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1414,12 +1504,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1438,13 +1529,13 @@ def _deserialize(self, target_obj, data):
if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
- if data is None:
+ if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map # type: ignore
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1474,9 +1565,8 @@ def _deserialize(self, target_obj, data):
except (AttributeError, TypeError, KeyError) as err:
msg = "Unable to deserialize to object: " + class_name # type: ignore
raise DeserializationError(msg) from err
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1503,6 +1593,8 @@ def _classify_target(self, target, data):
:param str target: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
@@ -1514,7 +1606,7 @@ def _classify_target(self, target, data):
return target, target
try:
- target = target._classify(data, self.dependencies) # type: ignore
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
return target, target.__class__.__name__ # type: ignore
@@ -1529,10 +1621,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
:param str target_obj: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -1550,10 +1644,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1577,24 +1673,35 @@ def _unpack_content(raw_data, content_type=None):
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("readonly")
+ ]
+ const = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("constant")
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
setattr(response_obj, attr, attrs.get(attr))
if additional_properties:
- response_obj.additional_properties = additional_properties
+ response_obj.additional_properties = additional_properties # type: ignore
return response_obj
except TypeError as err:
msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err))
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1603,15 +1710,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1625,7 +1733,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1645,14 +1757,14 @@ def deserialize_data(self, data, data_type):
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
raise DeserializationError(msg) from err
- else:
- return self._deserialize(obj_type, data)
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1669,6 +1781,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1679,11 +1792,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1718,11 +1832,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1730,6 +1843,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1741,24 +1855,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, str):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
@@ -1766,6 +1879,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1779,8 +1893,7 @@ def deserialize_unicode(data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1792,6 +1905,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1802,9 +1916,9 @@ def deserialize_enum(data, enum_obj):
# Workaround. We might consider remove it in the future.
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
@@ -1820,6 +1934,7 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1832,6 +1947,7 @@ def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1847,8 +1963,9 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal
:raises: DeserializationError if string format invalid.
+ :rtype: decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
@@ -1863,6 +1980,7 @@ def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int
:rtype: long or int
:raises: ValueError if string format invalid.
"""
@@ -1875,6 +1993,7 @@ def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1885,14 +2004,14 @@ def deserialize_duration(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
raise DeserializationError(msg) from err
- else:
- return duration
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
@@ -1908,6 +2027,7 @@ def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
@@ -1922,6 +2042,7 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1937,14 +2058,14 @@ def deserialize_rfc(attr):
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1974,8 +2095,7 @@ def deserialize_iso(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1983,6 +2103,7 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be serialized.
+ :return: Deserialized datetime
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
@@ -1994,5 +2115,4 @@ def deserialize_unix(attr):
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
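
# Quick cross-check of the datetime helpers this file centralizes (private import
# path from this diff; may change between versions):
from azure.monitor.query._generated._serialization import Deserializer, Serializer

dt = Deserializer.deserialize_iso("2024-05-01T12:30:00Z")
assert Serializer.serialize_iso(dt) == "2024-05-01T12:30:00.000Z"
assert Deserializer.deserialize_unix(1714566600) == dt
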
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/__init__.py
index 74ab94e4977e..70b9d32ba891 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/__init__.py
@@ -5,12 +5,18 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._client import MonitorQueryClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._client import MonitorQueryClient # type: ignore
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -18,6 +24,6 @@
__all__ = [
"MonitorQueryClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_client.py
index 677c4dcbefb6..64d5c312b008 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_client.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_client.py
@@ -8,6 +8,7 @@
from copy import deepcopy
from typing import Any, Awaitable
+from typing_extensions import Self
from azure.core import AsyncPipelineClient
from azure.core.pipeline import policies
@@ -85,7 +86,7 @@ def send_request(
async def close(self) -> None:
await self._client.close()
- async def __aenter__(self) -> "MonitorQueryClient":
+ async def __aenter__(self) -> Self:
await self._client.__aenter__()
return self
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_configuration.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_configuration.py
index 94be0b988b5f..4e0060b03211 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_configuration.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_configuration.py
@@ -13,7 +13,7 @@
VERSION = "unknown"
-class MonitorQueryClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+class MonitorQueryClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for MonitorQueryClient.
Note that all parameters used to create this instance are saved as instance
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/__init__.py
index f20f4cbfa4f3..e4231c981f36 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/__init__.py
@@ -5,17 +5,23 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._operations import QueryOperations
-from ._operations import MetadataOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._operations import QueryOperations # type: ignore
+from ._operations import MetadataOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"QueryOperations",
"MetadataOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/_operations.py
index f60fbf3296e4..e2fba7296b10 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/_operations.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -9,7 +9,7 @@
import datetime
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -39,7 +39,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -66,7 +66,6 @@ def __init__(self, *args, **kwargs) -> None:
async def get(
self, workspace_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query.
Executes an Analytics query for data.
@@ -95,48 +94,39 @@ async def get(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -166,8 +156,6 @@ async def get(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- await response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
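
Alongside dropping `Type` from the typing imports, every `error_map` in this file is widened from `MutableMapping[int, Type[HttpResponseError]]` to a bare `MutableMapping`. A small sketch of why the narrower annotation was a poor fit, assuming the usual azure-core pattern of merging caller-supplied overrides into the defaults (`ThrottledError` is hypothetical):

    from collections.abc import MutableMapping

    from azure.core.exceptions import (
        ClientAuthenticationError,
        HttpResponseError,
        ResourceExistsError,
        ResourceNotFoundError,
    )

    # Left unparameterized: the generated operations typically merge
    # caller-supplied overrides (kwargs.pop("error_map", {})) into these
    # defaults, and those values need not be HttpResponseError subclasses,
    # which the old MutableMapping[int, Type[HttpResponseError]] rejected.
    error_map: MutableMapping = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }


    class ThrottledError(HttpResponseError):
        """Hypothetical service-specific error for a 429 response."""


    # A caller-style override merged into the defaults.
    error_map.update({429: ThrottledError})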
@@ -191,7 +179,6 @@ async def execute(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query.
Executes an Analytics query for data. `Here
@@ -220,13 +207,10 @@ async def execute(
# JSON input template you can fill out and use as your body input.
body = {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The timespan over which to query
- data. This is an ISO8601 time period value. This timespan is applied in addition
- to any that are specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to query in addition to the
- primary workspace.
+ "str"
]
}
@@ -236,45 +220,36 @@ async def execute(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
@@ -288,7 +263,6 @@ async def execute(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query.
Executes an Analytics query for data. `Here
@@ -321,45 +295,36 @@ async def execute(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
@@ -367,7 +332,6 @@ async def execute(
async def execute(
self, workspace_id: str, body: Union[JSON, IO[bytes]], *, prefer: Optional[str] = None, **kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query.
Executes an Analytics query for data. `Here
@@ -393,13 +357,10 @@ async def execute(
# JSON input template you can fill out and use as your body input.
body = {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The timespan over which to query
- data. This is an ISO8601 time period value. This timespan is applied in addition
- to any that are specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to query in addition to the
- primary workspace.
+ "str"
]
}
@@ -409,48 +370,39 @@ async def execute(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -491,8 +443,6 @@ async def execute(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- await response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
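
Every error path in this file also drops the `if _stream: await response.read()` step, so a streamed body is no longer eagerly loaded into memory before the failure is mapped. A condensed sketch of the error path as it now stands; `_raise_for_status` is a hypothetical helper name, and the body mirrors the context lines above:

    from azure.core.exceptions import HttpResponseError, map_error


    async def _raise_for_status(pipeline_response, error_map) -> None:
        # Mirrors the regenerated error path: map the status code and
        # raise without first draining a streamed response body.
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)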
@@ -510,7 +460,6 @@ async def execute(
async def resource_get(
self, resource_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource URI.
Executes an Analytics query for data in the context of a resource. `Here
@@ -540,48 +489,39 @@ async def resource_get(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -611,8 +551,6 @@ async def resource_get(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- await response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
@@ -636,7 +574,6 @@ async def resource_execute(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource ID.
Executes an Analytics query for data in the context of a resource. `Here
@@ -664,13 +601,10 @@ async def resource_execute(
# JSON input template you can fill out and use as your body input.
body = {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The timespan over which to query
- data. This is an ISO8601 time period value. This timespan is applied in addition
- to any that are specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to query in addition to the
- primary workspace.
+ "str"
]
}
@@ -680,45 +614,36 @@ async def resource_execute(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
@@ -732,7 +657,6 @@ async def resource_execute(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource ID.
Executes an Analytics query for data in the context of a resource. `Here
@@ -764,45 +688,36 @@ async def resource_execute(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
@@ -810,7 +725,6 @@ async def resource_execute(
async def resource_execute(
self, resource_id: str, body: Union[JSON, IO[bytes]], *, prefer: Optional[str] = None, **kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource ID.
Executes an Analytics query for data in the context of a resource. `Here
@@ -835,13 +749,10 @@ async def resource_execute(
# JSON input template you can fill out and use as your body input.
body = {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The timespan over which to query
- data. This is an ISO8601 time period value. This timespan is applied in addition
- to any that are specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to query in addition to the
- primary workspace.
+ "str"
]
}
@@ -851,48 +762,39 @@ async def resource_execute(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -933,8 +835,6 @@ async def resource_execute(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- await response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
@@ -950,7 +850,6 @@ async def resource_execute(
@overload
async def batch(self, body: JSON, *, content_type: str = "application/json", **kwargs: Any) -> JSON:
- # pylint: disable=line-too-long
"""Execute a batch of Analytics queries.
Executes a batch of Analytics queries for data. `Here
@@ -974,30 +873,19 @@ async def batch(self, body: JSON, *, content_type: str = "application/json", **k
"requests": [
{
"body": {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The
- timespan over which to query data. This is an ISO8601 time period
- value. This timespan is applied in addition to any that are
- specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to
- query in addition to the primary workspace.
+ "str"
]
},
- "id": "str", # Unique ID corresponding to each request in
- the batch. Required.
- "workspace": "str", # Primary Workspace ID of the query.
- This is the Workspace ID from the Properties blade in the Azure portal.
- Required.
+ "id": "str",
+ "workspace": "str",
"headers": {
- "str": "str" # Optional. Headers of the request. Can
- use prefer header to set server timeout and to query statistics and
- visualization information.
+ "str": "str"
},
- "method": "POST", # Optional. Default value is "POST". The
- method of a single request in a batch, defaults to POST.
- "path": "/query" # Optional. Default value is "/query". The
- query path of a single request in a batch, defaults to /query.
+ "method": "POST",
+ "path": "/query"
}
]
}
@@ -1008,66 +896,45 @@ async def batch(self, body: JSON, *, content_type: str = "application/json", **k
{
"body": {
"error": {
- "code": "str", # A machine readable error
- code. Required.
- "message": "str", # A human readable error
- message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's
- code. Required.
- "message": "str", # A human
- readable error message. Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional.
- Indicates resources which were responsible for the
- error.
+ "str"
],
- "target": "str", # Optional.
- Indicates which property in the request is responsible
- for the error.
- "value": "str" # Optional.
- Indicates which value in 'target' is responsible for the
- error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON
- format.
- "statistics": {}, # Optional. Statistics represented
- in JSON format.
+ "render": {},
+ "statistics": {},
"tables": [
{
"columns": [
{
- "name": "str", # The
- name of this column. Required.
- "type": "str" # The
- data type of this column. Required. Known values are:
- "bool", "datetime", "dynamic", "int", "long", "real",
- "string", "guid", "decimal", and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the
- table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting
- rows from this query. Required.
+ {}
]
]
}
]
},
"headers": {
- "str": "str" # Optional. Dictionary of
- :code:``.
+ "str": "str"
},
- "id": "str", # Optional. An array of responses corresponding
- to each individual request in a batch.
- "status": 0 # Optional. An array of responses corresponding
- to each individual request in a batch.
+ "id": "str",
+ "status": 0
}
]
}
@@ -1075,7 +942,6 @@ async def batch(self, body: JSON, *, content_type: str = "application/json", **k
@overload
async def batch(self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any) -> JSON:
- # pylint: disable=line-too-long
"""Execute a batch of Analytics queries.
Executes a batch of Analytics queries for data. `Here
@@ -1100,66 +966,45 @@ async def batch(self, body: IO[bytes], *, content_type: str = "application/json"
{
"body": {
"error": {
- "code": "str", # A machine readable error
- code. Required.
- "message": "str", # A human readable error
- message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's
- code. Required.
- "message": "str", # A human
- readable error message. Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional.
- Indicates resources which were responsible for the
- error.
+ "str"
],
- "target": "str", # Optional.
- Indicates which property in the request is responsible
- for the error.
- "value": "str" # Optional.
- Indicates which value in 'target' is responsible for the
- error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON
- format.
- "statistics": {}, # Optional. Statistics represented
- in JSON format.
+ "render": {},
+ "statistics": {},
"tables": [
{
"columns": [
{
- "name": "str", # The
- name of this column. Required.
- "type": "str" # The
- data type of this column. Required. Known values are:
- "bool", "datetime", "dynamic", "int", "long", "real",
- "string", "guid", "decimal", and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the
- table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting
- rows from this query. Required.
+ {}
]
]
}
]
},
"headers": {
- "str": "str" # Optional. Dictionary of
- :code:``.
+ "str": "str"
},
- "id": "str", # Optional. An array of responses corresponding
- to each individual request in a batch.
- "status": 0 # Optional. An array of responses corresponding
- to each individual request in a batch.
+ "id": "str",
+ "status": 0
}
]
}
@@ -1167,7 +1012,6 @@ async def batch(self, body: IO[bytes], *, content_type: str = "application/json"
@distributed_trace_async
async def batch(self, body: Union[JSON, IO[bytes]], **kwargs: Any) -> JSON:
- # pylint: disable=line-too-long
"""Execute a batch of Analytics queries.
Executes a batch of Analytics queries for data. `Here
@@ -1188,30 +1032,19 @@ async def batch(self, body: Union[JSON, IO[bytes]], **kwargs: Any) -> JSON:
"requests": [
{
"body": {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The
- timespan over which to query data. This is an ISO8601 time period
- value. This timespan is applied in addition to any that are
- specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to
- query in addition to the primary workspace.
+ "str"
]
},
- "id": "str", # Unique ID corresponding to each request in
- the batch. Required.
- "workspace": "str", # Primary Workspace ID of the query.
- This is the Workspace ID from the Properties blade in the Azure portal.
- Required.
+ "id": "str",
+ "workspace": "str",
"headers": {
- "str": "str" # Optional. Headers of the request. Can
- use prefer header to set server timeout and to query statistics and
- visualization information.
+ "str": "str"
},
- "method": "POST", # Optional. Default value is "POST". The
- method of a single request in a batch, defaults to POST.
- "path": "/query" # Optional. Default value is "/query". The
- query path of a single request in a batch, defaults to /query.
+ "method": "POST",
+ "path": "/query"
}
]
}
@@ -1222,71 +1055,50 @@ async def batch(self, body: Union[JSON, IO[bytes]], **kwargs: Any) -> JSON:
{
"body": {
"error": {
- "code": "str", # A machine readable error
- code. Required.
- "message": "str", # A human readable error
- message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's
- code. Required.
- "message": "str", # A human
- readable error message. Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional.
- Indicates resources which were responsible for the
- error.
+ "str"
],
- "target": "str", # Optional.
- Indicates which property in the request is responsible
- for the error.
- "value": "str" # Optional.
- Indicates which value in 'target' is responsible for the
- error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON
- format.
- "statistics": {}, # Optional. Statistics represented
- in JSON format.
+ "render": {},
+ "statistics": {},
"tables": [
{
"columns": [
{
- "name": "str", # The
- name of this column. Required.
- "type": "str" # The
- data type of this column. Required. Known values are:
- "bool", "datetime", "dynamic", "int", "long", "real",
- "string", "guid", "decimal", and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the
- table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting
- rows from this query. Required.
+ {}
]
]
}
]
},
"headers": {
- "str": "str" # Optional. Dictionary of
- :code:``.
+ "str": "str"
},
- "id": "str", # Optional. An array of responses corresponding
- to each individual request in a batch.
- "status": 0 # Optional. An array of responses corresponding
- to each individual request in a batch.
+ "id": "str",
+ "status": 0
}
]
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1325,8 +1137,6 @@ async def batch(self, body: Union[JSON, IO[bytes]], **kwargs: Any) -> JSON:
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- await response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
@@ -1344,7 +1154,6 @@ async def batch(self, body: Union[JSON, IO[bytes]], **kwargs: Any) -> JSON:
async def resource_get_xms(
self, resource_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource URI.
Executes an Analytics query for data in the context of a resource. `Here
@@ -1374,48 +1183,39 @@ async def resource_get_xms(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1445,8 +1245,6 @@ async def resource_get_xms(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- await response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
@@ -1470,7 +1268,6 @@ async def resource_execute_xms(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource ID.
Executes an Analytics query for data in the context of a resource. `Here
@@ -1498,13 +1295,10 @@ async def resource_execute_xms(
# JSON input template you can fill out and use as your body input.
body = {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The timespan over which to query
- data. This is an ISO8601 time period value. This timespan is applied in addition
- to any that are specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to query in addition to the
- primary workspace.
+ "str"
]
}
@@ -1514,45 +1308,36 @@ async def resource_execute_xms(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
@@ -1566,7 +1351,6 @@ async def resource_execute_xms(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource ID.
Executes an Analytics query for data in the context of a resource. `Here
@@ -1598,45 +1382,36 @@ async def resource_execute_xms(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
@@ -1644,7 +1419,6 @@ async def resource_execute_xms(
async def resource_execute_xms(
self, resource_id: str, body: Union[JSON, IO[bytes]], *, prefer: Optional[str] = None, **kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource ID.
Executes an Analytics query for data in the context of a resource. `Here
@@ -1669,13 +1443,10 @@ async def resource_execute_xms(
# JSON input template you can fill out and use as your body input.
body = {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The timespan over which to query
- data. This is an ISO8601 time period value. This timespan is applied in addition
- to any that are specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to query in addition to the
- primary workspace.
+ "str"
]
}
@@ -1685,48 +1456,39 @@ async def resource_execute_xms(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1767,8 +1529,6 @@ async def resource_execute_xms(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- await response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
@@ -1802,7 +1562,6 @@ def __init__(self, *args, **kwargs) -> None:
@distributed_trace_async
async def get(self, workspace_id: str, **kwargs: Any) -> JSON:
- # pylint: disable=line-too-long
"""Gets metadata information.
Retrieve the metadata information for the workspace, including its schema, functions, workspace
@@ -1822,122 +1581,93 @@ async def get(self, workspace_id: str, **kwargs: Any) -> JSON:
response == {
"applications": [
{
- "id": "str", # The ID of the Application Insights app.
- Required.
- "name": "str", # The name of the Application Insights app.
- Required.
- "region": "str", # The Azure region of the Application
- Insights app. Required.
- "resourceId": "str", # The ARM resource ID of the
- Application Insights app. Required.
+ "id": "str",
+ "name": "str",
+ "region": "str",
+ "resourceId": "str",
"related": {
"functions": [
- "str" # Optional. The related functions for
- the Application Insights app.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- Application Insights app.
+ "str"
]
}
}
],
"categories": [
{
- "displayName": "str", # The display name of the category.
- Required.
- "id": "str", # The ID of the category. Required.
- "description": "str", # Optional. The description of the
- category.
+ "displayName": "str",
+ "id": "str",
+ "description": "str",
"related": {
"functions": [
- "str" # Optional. The functions related to
- the category.
+ "str"
],
"queries": [
- "str" # Optional. The saved queries related
- to the category.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The resource types related
- to the category.
+ "str"
],
"solutions": [
- "str" # Optional. The Log Analytics
- solutions related to the category.
+ "str"
],
"tables": [
- "str" # Optional. The tables related to the
- category.
+ "str"
]
}
}
],
"functions": [
{
- "body": "str", # The KQL body of the function. Required.
- "id": "str", # The ID of the function. Required.
- "name": "str", # The name of the function, to be used in
- queries. Required.
- "description": "str", # Optional. The description of the
- function.
- "displayName": "str", # Optional. The display name of the
- function.
- "parameters": "str", # Optional. The parameters/arguments of
- the function, if any.
- "properties": {}, # Optional. The properties of the
- function.
+ "body": "str",
+ "id": "str",
+ "name": "str",
+ "description": "str",
+ "displayName": "str",
+ "parameters": "str",
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The related categories for
- the function.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the function.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the function.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- function.
+ "str"
],
"workspaces": [
- "str" # Optional. The related workspaces for
- the function.
+ "str"
]
},
- "tags": {} # Optional. The tags associated with the
- function.
+ "tags": {}
}
],
"permissions": [
{
"workspaces": [
{
- "resourceId": "str", # The resource ID on
- the permission indication. Required.
+ "resourceId": "str",
"denyTables": [
- "str" # Optional. The list of tables
- that were denied access for the resource ID.
+ "str"
]
}
],
"applications": [
{
- "resourceId": "str" # The resource ID on the
- permission indication. Required.
+ "resourceId": "str"
}
],
"resources": [
{
- "resourceId": "str", # The resource ID on
- the permission indication. Required.
+ "resourceId": "str",
"denyTables": [
- "str" # Optional. The list of tables
- that were denied access for the resource ID.
+ "str"
]
}
]
@@ -1945,221 +1675,164 @@ async def get(self, workspace_id: str, **kwargs: Any) -> JSON:
],
"queries": [
{
- "body": "str", # The KQL body of the query. Required.
- "id": "str", # The ID of the query. Required.
- "description": "str", # Optional. The description of the
- query.
- "displayName": "str", # Optional. The display name of the
- query.
+ "body": "str",
+ "id": "str",
+ "description": "str",
+ "displayName": "str",
"labels": [
- "str" # Optional. The user defined labels associated
- with the query.
+ "str"
],
- "properties": {}, # Optional. The properties of the query.
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The related categories for
- the query.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the query.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the query.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- query.
+ "str"
]
},
- "tags": {} # Optional. The tags associated with the query.
+ "tags": {}
}
],
"resourceTypes": [
{
- "id": "str", # The ID of the resource-type. Required.
- "type": "str", # The type of the resource-type. Required.
- "description": "str", # Optional. The description of the
- resource-type.
- "displayName": "str", # Optional. The display name of the
- resource-type.
+ "id": "str",
+ "type": "str",
+ "description": "str",
+ "displayName": "str",
"labels": [
- "str" # Optional. The user-defined labels of the
- resource-type.
+ "str"
],
- "properties": {}, # Optional. The properties of the
- resource-type.
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The categories related to
- the resource-type.
+ "str"
],
"functions": [
- "str" # Optional. The functions related to
- the resource-type.
+ "str"
],
"queries": [
- "str" # Optional. The queries related to the
- resource-type.
+ "str"
],
"resources": [
- "str" # Optional. The Azure resources
- related to the resource-type.
+ "str"
],
"tables": [
- "str" # Optional. The tables related to the
- resource-type.
+ "str"
],
"workspaces": [
- "str" # Optional. The Log Analytics
- workspaces related to the resource-type.
+ "str"
]
},
- "tags": {} # Optional. The tags associated with the
- resource-type.
+ "tags": {}
}
],
"resources": [
- {} # Optional. The list of Azure resources that were referenced in
- the metadata request.
+ {}
],
"solutions": [
{
- "id": "str", # The ID of the Log Analytics solution.
- Required.
- "name": "str", # The name of the Log Analytics solution.
- Required.
+ "id": "str",
+ "name": "str",
"related": {
"tables": [
- "str" # The tables related to the Log
- Analytics solution. Required.
+ "str"
],
"categories": [
- "str" # Optional. The categories related to
- the Log Analytics solution.
+ "str"
],
"functions": [
- "str" # Optional. The functions related to
- the Log Analytics solution.
+ "str"
],
"queries": [
- "str" # Optional. The saved queries related
- to the Log Analytics solution.
+ "str"
],
"workspaces": [
- "str" # Optional. The Workspaces referenced
- in the metadata request that are related to the Log Analytics
- solution.
+ "str"
]
},
- "description": "str", # Optional. The description of the Log
- Analytics solution.
- "displayName": "str", # Optional. The display name of the
- Log Analytics solution.
- "properties": {}, # Optional. The properties of the Log
- Analytics solution.
- "tags": {} # Optional. The tags that are associated with the
- Log Analytics solution.
+ "description": "str",
+ "displayName": "str",
+ "properties": {},
+ "tags": {}
}
],
"tables": [
{
- "id": "str", # The ID of the table. Required.
- "name": "str", # The name of the table. Required.
+ "id": "str",
+ "name": "str",
"columns": [
{
- "name": "str", # The name of the column.
- Required.
- "type": "str", # The data type of the
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
- "description": "str", # Optional. The
- description of the column.
- "isPreferredFacet": bool, # Optional. A flag
- indicating this column is a preferred facet.
- "source": {} # Optional. an indication of
- the source of the column, used only when multiple workspaces have
- conflicting definition for the column.
+ "name": "str",
+ "type": "str",
+ "description": "str",
+ "isPreferredFacet": bool,
+ "source": {}
}
],
- "description": "str", # Optional. The description of the
- table.
+ "description": "str",
"labels": [
- "str" # Optional. The user defined labels of the
- table.
+ "str"
],
- "properties": {}, # Optional. The properties of the table.
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The related categories for
- the table.
+ "str"
],
"functions": [
- "str" # Optional. The related functions for
- the table.
+ "str"
],
"queries": [
- "str" # Optional. The related saved queries
- for the table.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the table.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the table.
+ "str"
],
"workspaces": [
- "str" # Optional. The related Log Analytics
- workspaces for the table.
+ "str"
]
},
- "tags": {}, # Optional. The tags associated with the table.
- "timespanColumn": "str" # Optional. The column associated
- with the timespan query parameter for the table.
+ "tags": {},
+ "timespanColumn": "str"
}
],
"workspaces": [
{
- "id": "str", # The ID of the Log Analytics workspace.
- Required.
- "name": "str", # The name of the Log Analytics workspace.
- Required.
- "region": "str", # The Azure region of the Log Analytics
- workspace. Required.
- "resourceId": "str", # The ARM resource ID of the Log
- Analytics workspace. Required.
+ "id": "str",
+ "name": "str",
+ "region": "str",
+ "resourceId": "str",
"related": {
"functions": [
- "str" # Optional. The related functions for
- the Log Analytics workspace.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the Log Analytics workspace.
+ "str"
],
"resources": [
- "str" # Optional. The related Azure
- resources for the Log Analytics workspace.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the Log Analytics workspace.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- Log Analytics workspace.
+ "str"
]
}
}
]
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2187,8 +1860,6 @@ async def get(self, workspace_id: str, **kwargs: Any) -> JSON:
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- await response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
@@ -2204,7 +1875,6 @@ async def get(self, workspace_id: str, **kwargs: Any) -> JSON:
@distributed_trace_async
async def post(self, workspace_id: str, **kwargs: Any) -> JSON:
- # pylint: disable=line-too-long
"""Gets metadata information.
Retrieve the metadata information for the workspace, including its schema, functions, workspace
@@ -2224,122 +1894,93 @@ async def post(self, workspace_id: str, **kwargs: Any) -> JSON:
response == {
"applications": [
{
- "id": "str", # The ID of the Application Insights app.
- Required.
- "name": "str", # The name of the Application Insights app.
- Required.
- "region": "str", # The Azure region of the Application
- Insights app. Required.
- "resourceId": "str", # The ARM resource ID of the
- Application Insights app. Required.
+ "id": "str",
+ "name": "str",
+ "region": "str",
+ "resourceId": "str",
"related": {
"functions": [
- "str" # Optional. The related functions for
- the Application Insights app.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- Application Insights app.
+ "str"
]
}
}
],
"categories": [
{
- "displayName": "str", # The display name of the category.
- Required.
- "id": "str", # The ID of the category. Required.
- "description": "str", # Optional. The description of the
- category.
+ "displayName": "str",
+ "id": "str",
+ "description": "str",
"related": {
"functions": [
- "str" # Optional. The functions related to
- the category.
+ "str"
],
"queries": [
- "str" # Optional. The saved queries related
- to the category.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The resource types related
- to the category.
+ "str"
],
"solutions": [
- "str" # Optional. The Log Analytics
- solutions related to the category.
+ "str"
],
"tables": [
- "str" # Optional. The tables related to the
- category.
+ "str"
]
}
}
],
"functions": [
{
- "body": "str", # The KQL body of the function. Required.
- "id": "str", # The ID of the function. Required.
- "name": "str", # The name of the function, to be used in
- queries. Required.
- "description": "str", # Optional. The description of the
- function.
- "displayName": "str", # Optional. The display name of the
- function.
- "parameters": "str", # Optional. The parameters/arguments of
- the function, if any.
- "properties": {}, # Optional. The properties of the
- function.
+ "body": "str",
+ "id": "str",
+ "name": "str",
+ "description": "str",
+ "displayName": "str",
+ "parameters": "str",
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The related categories for
- the function.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the function.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the function.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- function.
+ "str"
],
"workspaces": [
- "str" # Optional. The related workspaces for
- the function.
+ "str"
]
},
- "tags": {} # Optional. The tags associated with the
- function.
+ "tags": {}
}
],
"permissions": [
{
"workspaces": [
{
- "resourceId": "str", # The resource ID on
- the permission indication. Required.
+ "resourceId": "str",
"denyTables": [
- "str" # Optional. The list of tables
- that were denied access for the resource ID.
+ "str"
]
}
],
"applications": [
{
- "resourceId": "str" # The resource ID on the
- permission indication. Required.
+ "resourceId": "str"
}
],
"resources": [
{
- "resourceId": "str", # The resource ID on
- the permission indication. Required.
+ "resourceId": "str",
"denyTables": [
- "str" # Optional. The list of tables
- that were denied access for the resource ID.
+ "str"
]
}
]
@@ -2347,221 +1988,164 @@ async def post(self, workspace_id: str, **kwargs: Any) -> JSON:
],
"queries": [
{
- "body": "str", # The KQL body of the query. Required.
- "id": "str", # The ID of the query. Required.
- "description": "str", # Optional. The description of the
- query.
- "displayName": "str", # Optional. The display name of the
- query.
+ "body": "str",
+ "id": "str",
+ "description": "str",
+ "displayName": "str",
"labels": [
- "str" # Optional. The user defined labels associated
- with the query.
+ "str"
],
- "properties": {}, # Optional. The properties of the query.
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The related categories for
- the query.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the query.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the query.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- query.
+ "str"
]
},
- "tags": {} # Optional. The tags associated with the query.
+ "tags": {}
}
],
"resourceTypes": [
{
- "id": "str", # The ID of the resource-type. Required.
- "type": "str", # The type of the resource-type. Required.
- "description": "str", # Optional. The description of the
- resource-type.
- "displayName": "str", # Optional. The display name of the
- resource-type.
+ "id": "str",
+ "type": "str",
+ "description": "str",
+ "displayName": "str",
"labels": [
- "str" # Optional. The user-defined labels of the
- resource-type.
+ "str"
],
- "properties": {}, # Optional. The properties of the
- resource-type.
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The categories related to
- the resource-type.
+ "str"
],
"functions": [
- "str" # Optional. The functions related to
- the resource-type.
+ "str"
],
"queries": [
- "str" # Optional. The queries related to the
- resource-type.
+ "str"
],
"resources": [
- "str" # Optional. The Azure resources
- related to the resource-type.
+ "str"
],
"tables": [
- "str" # Optional. The tables related to the
- resource-type.
+ "str"
],
"workspaces": [
- "str" # Optional. The Log Analytics
- workspaces related to the resource-type.
+ "str"
]
},
- "tags": {} # Optional. The tags associated with the
- resource-type.
+ "tags": {}
}
],
"resources": [
- {} # Optional. The list of Azure resources that were referenced in
- the metadata request.
+ {}
],
"solutions": [
{
- "id": "str", # The ID of the Log Analytics solution.
- Required.
- "name": "str", # The name of the Log Analytics solution.
- Required.
+ "id": "str",
+ "name": "str",
"related": {
"tables": [
- "str" # The tables related to the Log
- Analytics solution. Required.
+ "str"
],
"categories": [
- "str" # Optional. The categories related to
- the Log Analytics solution.
+ "str"
],
"functions": [
- "str" # Optional. The functions related to
- the Log Analytics solution.
+ "str"
],
"queries": [
- "str" # Optional. The saved queries related
- to the Log Analytics solution.
+ "str"
],
"workspaces": [
- "str" # Optional. The Workspaces referenced
- in the metadata request that are related to the Log Analytics
- solution.
+ "str"
]
},
- "description": "str", # Optional. The description of the Log
- Analytics solution.
- "displayName": "str", # Optional. The display name of the
- Log Analytics solution.
- "properties": {}, # Optional. The properties of the Log
- Analytics solution.
- "tags": {} # Optional. The tags that are associated with the
- Log Analytics solution.
+ "description": "str",
+ "displayName": "str",
+ "properties": {},
+ "tags": {}
}
],
"tables": [
{
- "id": "str", # The ID of the table. Required.
- "name": "str", # The name of the table. Required.
+ "id": "str",
+ "name": "str",
"columns": [
{
- "name": "str", # The name of the column.
- Required.
- "type": "str", # The data type of the
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
- "description": "str", # Optional. The
- description of the column.
- "isPreferredFacet": bool, # Optional. A flag
- indicating this column is a preferred facet.
- "source": {} # Optional. an indication of
- the source of the column, used only when multiple workspaces have
- conflicting definition for the column.
+ "name": "str",
+ "type": "str",
+ "description": "str",
+ "isPreferredFacet": bool,
+ "source": {}
}
],
- "description": "str", # Optional. The description of the
- table.
+ "description": "str",
"labels": [
- "str" # Optional. The user defined labels of the
- table.
+ "str"
],
- "properties": {}, # Optional. The properties of the table.
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The related categories for
- the table.
+ "str"
],
"functions": [
- "str" # Optional. The related functions for
- the table.
+ "str"
],
"queries": [
- "str" # Optional. The related saved queries
- for the table.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the table.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the table.
+ "str"
],
"workspaces": [
- "str" # Optional. The related Log Analytics
- workspaces for the table.
+ "str"
]
},
- "tags": {}, # Optional. The tags associated with the table.
- "timespanColumn": "str" # Optional. The column associated
- with the timespan query parameter for the table.
+ "tags": {},
+ "timespanColumn": "str"
}
],
"workspaces": [
{
- "id": "str", # The ID of the Log Analytics workspace.
- Required.
- "name": "str", # The name of the Log Analytics workspace.
- Required.
- "region": "str", # The Azure region of the Log Analytics
- workspace. Required.
- "resourceId": "str", # The ARM resource ID of the Log
- Analytics workspace. Required.
+ "id": "str",
+ "name": "str",
+ "region": "str",
+ "resourceId": "str",
"related": {
"functions": [
- "str" # Optional. The related functions for
- the Log Analytics workspace.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the Log Analytics workspace.
+ "str"
],
"resources": [
- "str" # Optional. The related Azure
- resources for the Log Analytics workspace.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the Log Analytics workspace.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- Log Analytics workspace.
+ "str"
]
}
}
]
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2589,8 +2173,6 @@ async def post(self, workspace_id: str, **kwargs: Any) -> JSON:
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- await response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
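
The `error_map` annotation above is loosened to a bare `MutableMapping`, but the runtime dispatch is unchanged: `map_error` looks the status code up in the mapping and raises the matching typed exception, and anything unmapped falls through to the generic `HttpResponseError`. A minimal sketch of that dispatch, where `FakeResponse` is an illustrative stand-in for the pipeline response:

    from azure.core.exceptions import (
        ClientAuthenticationError,
        HttpResponseError,
        ResourceExistsError,
        ResourceNotFoundError,
        map_error,
    )

    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }

    class FakeResponse:  # hypothetical stand-in for the pipeline response
        status_code = 404
        reason = "Not Found"

    response = FakeResponse()
    if response.status_code not in [200]:
        # Raises ResourceNotFoundError here; unmapped codes fall through
        # to the generic HttpResponseError below.
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response)
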
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/__init__.py
index 47183c3ee517..7e503e3d6846 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/__init__.py
@@ -5,12 +5,18 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._client import MonitorMetricsClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._client import MonitorMetricsClient # type: ignore
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -18,6 +24,6 @@
__all__ = [
"MonitorMetricsClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
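
The new import layout serves two audiences at once: type checkers see the `_patch` re-exports through the `TYPE_CHECKING` guard, while at runtime the `try`/`except ImportError` decides whether handwritten customizations extend `__all__`. A reduced sketch of the pattern, with `ExampleClient` standing in for whichever generated client the package exposes:

    # __init__.py sketch (module layout assumed: _client.py plus optional _patch.py)
    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Evaluated by mypy/pyright only, never at runtime.
        from ._patch import *  # pylint: disable=unused-wildcard-import

    from ._client import ExampleClient  # hypothetical generated client

    try:
        from ._patch import __all__ as _patch_all
        from ._patch import *
    except ImportError:
        _patch_all = []  # no handwritten customizations shipped

    __all__ = ["ExampleClient"]
    __all__.extend([p for p in _patch_all if p not in __all__])
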
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_client.py
index 888ad95aaf75..8c0ad4537a9e 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_client.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_client.py
@@ -8,6 +8,7 @@
from copy import deepcopy
from typing import Any
+from typing_extensions import Self
from azure.core import PipelineClient
from azure.core.pipeline import policies
@@ -18,7 +19,7 @@
from .operations import MetricDefinitionsOperations, MetricNamespacesOperations, MetricsOperations
-class MonitorMetricsClient: # pylint: disable=client-accepts-api-version-keyword
+class MonitorMetricsClient:
"""Azure Monitor Metrics Python Client.
:ivar metric_definitions: MetricDefinitionsOperations operations
@@ -93,7 +94,7 @@ def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs:
def close(self) -> None:
self._client.close()
- def __enter__(self) -> "MonitorMetricsClient":
+ def __enter__(self) -> Self:
self._client.__enter__()
return self
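
Swapping the quoted class name for `typing_extensions.Self` matters for subclassing: a `with` block over a derived client now types the bound variable as the derived class, so members added in the subclass resolve without casts. A self-contained illustration (class names are hypothetical):

    from typing_extensions import Self

    class BaseClient:
        def __enter__(self) -> Self:
            return self

        def __exit__(self, *exc_details) -> None:
            pass

    class TracingClient(BaseClient):
        def ping(self) -> str:
            return "pong"

    with TracingClient() as client:
        # With -> Self, type checkers infer client as TracingClient,
        # so .ping() is visible; with -> "BaseClient" it would not be.
        print(client.ping())
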
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_configuration.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_configuration.py
index 4fb1b263e1ea..e84c97f5a561 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_configuration.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_configuration.py
@@ -13,7 +13,7 @@
VERSION = "unknown"
-class MonitorMetricsClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+class MonitorMetricsClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for MonitorMetricsClient.
Note that all parameters used to create this instance are saved as instance
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_serialization.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_serialization.py
index 2f781d740827..e2ad51869908 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_serialization.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,6 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
@@ -52,7 +52,6 @@
MutableMapping,
Type,
List,
- Mapping,
)
try:
@@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -144,6 +145,8 @@ def _json_attemp(data):
# context otherwise.
_LOGGER.critical("Wasn't XML not JSON, failing")
raise DeserializationError("XML is invalid") from err
+ elif content_type.startswith("text/"):
+ return data_as_str
raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
@classmethod
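
The added `text/` branch means `deserialize_from_text` now hands plain-text payloads back as the decoded string instead of raising. The full decision ladder, condensed into a standalone function for clarity (the real method also handles streams and byte decoding):

    import json
    import xml.etree.ElementTree as ET

    def unpack(data_as_str: str, content_type: str):
        # Condensed content-type ladder mirroring deserialize_from_text.
        if "json" in content_type:
            return json.loads(data_as_str)
        if "xml" in content_type:
            return ET.fromstring(data_as_str)  # nosec - trusted service payload
        if content_type.startswith("text/"):
            return data_as_str  # the new branch: raw text passes through
        raise ValueError("Cannot deserialize content-type: {}".format(content_type))

    print(unpack('{"ok": true}', "application/json"))  # {'ok': True}
    print(unpack("plain body", "text/plain"))          # plain body
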
@@ -153,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
Headers will be tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -182,15 +190,30 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+ """UTF offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(hours=1)
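
The docstrings added here spell out the three-method `tzinfo` contract (`utcoffset`, `tzname`, `dst`) that lets `datetime` render timezone-aware values. A quick standalone check of that contract, with `dst` pinned to zero as the "no daylight saving" docstring describes:

    import datetime

    class UTC(datetime.tzinfo):
        """Minimal UTC tzinfo sketch."""

        def utcoffset(self, dt):
            return datetime.timedelta(0)  # UTC is always +00:00

        def tzname(self, dt):
            return "Z"

        def dst(self, dt):
            return datetime.timedelta(0)  # no daylight saving applied

    aware = datetime.datetime(2024, 1, 1, 12, 0, tzinfo=UTC())
    print(aware.isoformat())  # 2024-01-01T12:00:00+00:00
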
@@ -204,7 +227,7 @@ class _FixedOffset(datetime.tzinfo): # type: ignore
:param datetime.timedelta offset: offset in timedelta format
"""
- def __init__(self, offset):
+ def __init__(self, offset) -> None:
self.__offset = offset
def utcoffset(self, dt):
@@ -233,24 +256,26 @@ def __getinitargs__(self):
_FLATTEN = re.compile(r"(?<!\\)\.")
def __init__(self, **kwargs: Any) -> None:
self.additional_properties: Optional[Dict[str, Any]] = {}
- for k in kwargs:
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -298,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
setattr(self, k, kwargs[k])
def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
def __str__(self) -> str:
@@ -324,7 +366,11 @@ def is_xml_model(cls) -> bool:
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
xml_map = cls._xml_map # type: ignore
except AttributeError:
@@ -344,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
def as_dict(
self,
@@ -378,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
@@ -393,7 +444,7 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@@ -406,6 +457,7 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@@ -424,9 +476,11 @@ def from_dict(
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
deserializer.key_extractors = ( # type: ignore
@@ -446,21 +500,25 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
- Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
if not isinstance(response, ET.Element):
rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+ subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
else:
subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
if subtype_value:
@@ -499,11 +557,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer(object): # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -538,7 +598,7 @@ class Serializer(object):
"multiple": lambda x, y: x % y != 0,
}
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.serialize_type = {
"iso-8601": Serializer.serialize_iso,
"rfc-1123": Serializer.serialize_rfc,
@@ -558,13 +618,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None):
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -590,12 +653,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -631,7 +696,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if isinstance(new_attr, list):
serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
@@ -662,17 +728,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
raise SerializationError(msg) from err
- else:
- return serialized
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
@@ -701,7 +767,7 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
raise SerializationError("Unable to build a model: " + str(err)) from err
@@ -710,9 +776,11 @@ def body(self, data, data_type, **kwargs):
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -726,21 +794,20 @@ def url(self, name, data, data_type, **kwargs):
output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :keyword bool skip_quote: Whether to skip quote the serialized result.
- Defaults to False.
:rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
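
The recurring edit in these hunks is exception chaining: `raise TypeError(...) from exc` records the original `SerializationError` as `__cause__`, so the traceback shows both failures rather than silently discarding the first. In miniature (names are illustrative):

    class SerializationError(Exception):
        pass

    def serialize(value):
        raise SerializationError("cannot serialize {!r}".format(value))

    def url(name, value):
        try:
            return serialize(value)
        except SerializationError as exc:
            # 'from exc' preserves the root cause on the new exception.
            raise TypeError("{} must be a serializable type.".format(name)) from exc

    try:
        url("metricnames", object())
    except TypeError as err:
        print(type(err.__cause__).__name__)  # SerializationError
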
@@ -757,19 +824,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -778,21 +846,20 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
@@ -803,7 +870,7 @@ def serialize_data(self, data, data_type, **kwargs):
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -819,11 +886,10 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
raise SerializationError(msg.format(data, data_type)) from err
- else:
- return self._serialize(data, **kwargs)
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -839,23 +905,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+ :param obj data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -869,8 +938,7 @@ def serialize_unicode(cls, data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -880,15 +948,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
- :keyword bool do_quote: Whether to quote the serialized result of each iterable element.
- Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -943,9 +1009,8 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
@@ -969,7 +1034,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -977,6 +1042,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -1001,7 +1067,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1032,56 +1098,61 @@ def serialize_enum(attr, enum_obj=None):
try:
enum_obj(result) # type: ignore
return result
- except ValueError:
+ except ValueError as exc:
for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+ :param decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1089,11 +1160,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1103,30 +1175,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1139,12 +1213,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1170,13 +1245,14 @@ def serialize_iso(attr, **kwargs):
raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+ :return: serialized unix
"""
if isinstance(attr, int):
return attr
@@ -1184,11 +1260,11 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
@@ -1209,7 +1285,9 @@ def rest_key_extractor(attr, attr_desc, data):
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1230,17 +1308,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1277,7 +1367,7 @@ def _extract_name_from_internal_type(internal_type):
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1329,22 +1419,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1361,9 +1450,9 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.deserialize_type = {
"iso-8601": Deserializer.deserialize_iso,
"rfc-1123": Deserializer.deserialize_rfc,
@@ -1401,11 +1490,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1414,12 +1504,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1438,13 +1529,13 @@ def _deserialize(self, target_obj, data):
if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
- if data is None:
+ if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map # type: ignore
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1474,9 +1565,8 @@ def _deserialize(self, target_obj, data):
except (AttributeError, TypeError, KeyError) as err:
msg = "Unable to deserialize to object: " + class_name # type: ignore
raise DeserializationError(msg) from err
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1503,6 +1593,8 @@ def _classify_target(self, target, data):
:param str target: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
@@ -1514,7 +1606,7 @@ def _classify_target(self, target, data):
return target, target
try:
- target = target._classify(data, self.dependencies) # type: ignore
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
return target, target.__class__.__name__ # type: ignore
@@ -1529,10 +1621,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
:param str target_obj: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -1550,10 +1644,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1577,24 +1673,35 @@ def _unpack_content(raw_data, content_type=None):
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("readonly")
+ ]
+ const = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("constant")
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
setattr(response_obj, attr, attrs.get(attr))
if additional_properties:
- response_obj.additional_properties = additional_properties
+ response_obj.additional_properties = additional_properties # type: ignore
return response_obj
except TypeError as err:
msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err))
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1603,15 +1710,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1625,7 +1733,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1645,14 +1757,14 @@ def deserialize_data(self, data, data_type):
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
raise DeserializationError(msg) from err
- else:
- return self._deserialize(obj_type, data)
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1669,6 +1781,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1679,11 +1792,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1718,11 +1832,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1730,6 +1843,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1741,24 +1855,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, str):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
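
`deserialize_basic` accepts primitives that arrive either natively typed or string-encoded, which is why booleans get the `'true'/'1'` and `'false'/'0'` special cases before the `eval(data_type)` fallback for the remaining basic types. The same ladder sketched with an explicit lookup table instead of `eval`:

    _CASTS = {"str": str, "int": int, "float": float}

    def deserialize_basic(attr, data_type):
        # Sketch: the generated code uses eval(data_type)(attr) for the
        # final cast; a dict lookup is shown here for the same effect.
        if data_type == "bool":
            if attr in (True, False, 1, 0):
                return bool(attr)
            if isinstance(attr, str):
                if attr.lower() in ("true", "1"):
                    return True
                if attr.lower() in ("false", "0"):
                    return False
            raise TypeError("Invalid boolean value: {}".format(attr))
        return _CASTS[data_type](attr)

    print(deserialize_basic("1", "bool"))   # True
    print(deserialize_basic("42", "int"))   # 42
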
@@ -1766,6 +1879,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1779,8 +1893,7 @@ def deserialize_unicode(data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1792,6 +1905,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1802,9 +1916,9 @@ def deserialize_enum(data, enum_obj):
# Workaround. We might consider removing it in the future.
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
@@ -1820,6 +1934,7 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1832,6 +1947,7 @@ def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1847,8 +1963,9 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal
:raises: DeserializationError if string format invalid.
+ :rtype: decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
@@ -1863,6 +1980,7 @@ def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int
:rtype: long or int
:raises: ValueError if string format invalid.
"""
@@ -1875,6 +1993,7 @@ def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1885,14 +2004,14 @@ def deserialize_duration(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
raise DeserializationError(msg) from err
- else:
- return duration
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
@@ -1908,6 +2027,7 @@ def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
@@ -1922,6 +2042,7 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1937,14 +2058,14 @@ def deserialize_rfc(attr):
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1974,8 +2095,7 @@ def deserialize_iso(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1983,6 +2103,7 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be serialized.
+ :return: Deserialized datetime
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
@@ -1994,5 +2115,4 @@ def deserialize_unix(attr):
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
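
Most of the `_serialization.py` hunks above are one mechanical cleanup applied many times: the `else:` after a fully-handled `try`/`except` (or after a returning `if`) is dropped and the final `return` dedented, satisfying pylint's `no-else-return`/`no-else-raise` checks without changing behavior. Before and after, using the duration deserializer as the model (simplified to `ValueError` to stay standalone; the SDK raises `DeserializationError` and already depends on `isodate`):

    import isodate

    def deserialize_duration_old(attr):
        try:
            duration = isodate.parse_duration(attr)
        except (ValueError, OverflowError, AttributeError) as err:
            raise ValueError("Cannot deserialize duration object.") from err
        else:
            return duration

    def deserialize_duration_new(attr):
        try:
            duration = isodate.parse_duration(attr)
        except (ValueError, OverflowError, AttributeError) as err:
            raise ValueError("Cannot deserialize duration object.") from err
        return duration  # identical behavior, one less nesting level

    print(deserialize_duration_new("PT1M"))  # 0:01:00
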
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/__init__.py
index 47183c3ee517..7e503e3d6846 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/__init__.py
@@ -5,12 +5,18 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._client import MonitorMetricsClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._client import MonitorMetricsClient # type: ignore
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -18,6 +24,6 @@
__all__ = [
"MonitorMetricsClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_client.py
index fefa6363e0ac..99d5dbea0753 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_client.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_client.py
@@ -8,6 +8,7 @@
from copy import deepcopy
from typing import Any, Awaitable
+from typing_extensions import Self
from azure.core import AsyncPipelineClient
from azure.core.pipeline import policies
@@ -18,7 +19,7 @@
from .operations import MetricDefinitionsOperations, MetricNamespacesOperations, MetricsOperations
-class MonitorMetricsClient: # pylint: disable=client-accepts-api-version-keyword
+class MonitorMetricsClient:
"""Azure Monitor Metrics Python Client.
:ivar metric_definitions: MetricDefinitionsOperations operations
@@ -95,7 +96,7 @@ def send_request(
async def close(self) -> None:
await self._client.close()
- async def __aenter__(self) -> "MonitorMetricsClient":
+ async def __aenter__(self) -> Self:
await self._client.__aenter__()
return self
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_configuration.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_configuration.py
index ef23b1ac5e5d..01db55ca7f03 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_configuration.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_configuration.py
@@ -13,7 +13,7 @@
VERSION = "unknown"
-class MonitorMetricsClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+class MonitorMetricsClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for MonitorMetricsClient.
Note that all parameters used to create this instance are saved as instance
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/__init__.py
index e1fc034d9277..64e613d8c69b 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/__init__.py
@@ -5,13 +5,19 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._operations import MetricDefinitionsOperations
-from ._operations import MetricsOperations
-from ._operations import MetricNamespacesOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._operations import MetricDefinitionsOperations # type: ignore
+from ._operations import MetricsOperations # type: ignore
+from ._operations import MetricNamespacesOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -19,5 +25,5 @@
"MetricsOperations",
"MetricNamespacesOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/_operations.py
index baaa9368230f..3a51ef43479e 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/_operations.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
import datetime
import sys
-from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar, cast
+from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, cast
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -35,7 +34,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
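
The version gate exists because subscripting `collections.abc.MutableMapping` only became legal in Python 3.9; on older interpreters the deprecated `typing.MutableMapping` fills in. The resulting `JSON` alias is just "any mutable mapping with string keys", as a small demonstration shows:

    import sys
    from typing import Any

    if sys.version_info >= (3, 9):
        from collections.abc import MutableMapping
    else:
        from typing import MutableMapping  # type: ignore

    JSON = MutableMapping[str, Any]

    def keys_of(payload: JSON) -> str:
        # Any dict satisfies the alias; iteration yields the keys.
        return ", ".join(sorted(payload))

    print(keys_of({"timespan": "PT1H", "value": []}))  # timespan, value
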
@@ -60,7 +59,6 @@ def __init__(self, *args, **kwargs) -> None:
@distributed_trace
def list(self, resource_uri: str, *, metricnamespace: Optional[str] = None, **kwargs: Any) -> AsyncIterable[JSON]:
- # pylint: disable=line-too-long
"""Lists the metric definitions for the resource.
:param resource_uri: The identifier of the resource. Required.
@@ -77,48 +75,34 @@ def list(self, resource_uri: str, *, metricnamespace: Optional[str] = None, **kw
# response body for status code(s): 200
response == {
- "category": "str", # Optional. Custom category name for this metric.
+ "category": "str",
"dimensions": [
{
- "value": "str", # The invariant value. Required.
- "localizedValue": "str" # Optional. The display name.
+ "value": "str",
+ "localizedValue": "str"
}
],
- "displayDescription": "str", # Optional. Detailed description of this
- metric.
- "id": "str", # Optional. The resource identifier of the metric definition.
- "isDimensionRequired": bool, # Optional. Flag to indicate whether the
- dimension is required.
+ "displayDescription": "str",
+ "id": "str",
+ "isDimensionRequired": bool,
"metricAvailabilities": [
{
- "retention": "1 day, 0:00:00", # Optional. The retention
- period for the metric at the specified timegrain. Expressed as a
- duration 'PT1M', 'P1D', etc.
- "timeGrain": "1 day, 0:00:00" # Optional. The time grain
- specifies a supported aggregation interval for the metric. Expressed as a
- duration 'PT1M', 'P1D', etc.
+ "retention": "1 day, 0:00:00",
+ "timeGrain": "1 day, 0:00:00"
}
],
- "metricClass": "str", # Optional. The class of the metric. Known values are:
- "Availability", "Transactions", "Errors", "Latency", and "Saturation".
+ "metricClass": "str",
"name": {
- "value": "str", # The invariant value. Required.
- "localizedValue": "str" # Optional. The display name.
+ "value": "str",
+ "localizedValue": "str"
},
- "namespace": "str", # Optional. The namespace the metric belongs to.
- "primaryAggregationType": "str", # Optional. The primary aggregation type
- value defining how to use the values for display. Known values are: "None",
- "Average", "Count", "Minimum", "Maximum", and "Total".
- "resourceId": "str", # Optional. The resource identifier of the resource
- that emitted the metric.
+ "namespace": "str",
+ "primaryAggregationType": "str",
+ "resourceId": "str",
"supportedAggregationTypes": [
- "str" # Optional. The collection of what aggregation types are
- supported.
+ "str"
],
- "unit": "str" # Optional. The unit of the metric. Known values are: "Count",
- "Bytes", "Seconds", "CountPerSecond", "BytesPerSecond", "Percent",
- "MilliSeconds", "ByteSeconds", "Unspecified", "Cores", "MilliCores", "NanoCores",
- and "BitsPerSecond".
+ "unit": "str"
}
"""
_headers = kwargs.pop("headers", {}) or {}
@@ -126,7 +110,7 @@ def list(self, resource_uri: str, *, metricnamespace: Optional[str] = None, **kw
cls: ClsType[JSON] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -180,8 +164,6 @@ async def get_next(next_link=None):
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- await response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
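
Outside this generated module, the pager above is normally reached through the public client. A minimal sketch of consuming the metric-definition documents shown in the sample response, assuming the public azure-monitor-query surface (the resource URI is a placeholder):

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azure.monitor.query.aio import MetricsQueryClient


    async def main() -> None:
        credential = DefaultAzureCredential()
        client = MetricsQueryClient(credential)
        async with client, credential:
            # Each item exposes the fields from the sample response above.
            async for definition in client.list_metric_definitions(
                "/subscriptions/.../providers/Microsoft.Compute/virtualMachines/vm1",
                namespace="microsoft.compute/virtualmachines",
            ):
                print(definition.name, definition.unit)


    asyncio.run(main())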
@@ -226,7 +208,6 @@ async def list(
rollupby: Optional[str] = None,
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""**Lists the metric values for a resource**.
:param resource_uri: The identifier of the resource. Required.
@@ -255,13 +236,13 @@ async def list(
:paramtype orderby: str
:keyword filter: The **$filter** is used to reduce the set of metric data
returned.:code:`<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`-
- Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\ **$filter=A eq ‘a1’ and
- B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\ :code:`<br>`- Invalid variant::code:`<br>`\ **$filter=A
- eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\ :code:`<br>`This is invalid because the
- logical or operator cannot separate two different metadata names.:code:`<br>`- Return all time
- series where A = a1, B = b1 and C = c1::code:`<br>`\ **$filter=A eq ‘a1’ and B eq ‘b1’ and C eq
- ‘c1’**\ :code:`<br>`- Return all time series where A = a1:code:`<br>`\ **$filter=A eq ‘a1’ and
- B eq ‘\ *’ and C eq ‘*\ ’**. Default value is None.
+ Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **$filter=A eq ‘a1’ and
+ B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\
+ **$filter=A eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid
+ because the logical or operator cannot separate two different metadata names.:code:`<br>`-
+ Return all time series where A = a1, B = b1 and C = c1::code:`<br>`\\ **$filter=A eq ‘a1’ and B
+ eq ‘b1’ and C eq ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>`\\
+ **$filter=A eq ‘a1’ and B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None.
:paramtype filter: str
:keyword result_type: Reduces the set of data collected. The syntax allowed depends on the
operation. See the operation's description for details. Known values are: "Data" and
@@ -293,81 +274,53 @@ async def list(
# response body for status code(s): 200
response == {
- "timespan": "str", # The timespan for which the data was retrieved. Its
- value consists of two datetimes concatenated, separated by '/'. This may be
- adjusted in the future and returned back from what was originally requested.
- Required.
+ "timespan": "str",
"value": [
{
- "id": "str", # The metric Id. Required.
+ "id": "str",
"name": {
- "value": "str", # The invariant value. Required.
- "localizedValue": "str" # Optional. The display
- name.
+ "value": "str",
+ "localizedValue": "str"
},
"timeseries": [
{
"data": [
{
"timeStamp": "2020-02-20
- 00:00:00", # The timestamp for the metric value in ISO
- 8601 format. Required.
- "average": 0.0, # Optional.
- The average value in the time range.
- "count": 0.0, # Optional.
- The number of samples in the time range. Can be used to
- determine the number of values that contributed to the
- average value.
- "maximum": 0.0, # Optional.
- The greatest value in the time range.
- "minimum": 0.0, # Optional.
- The least value in the time range.
- "total": 0.0 # Optional. The
- sum of all of the values in the time range.
+ 00:00:00",
+ "average": 0.0,
+ "count": 0.0,
+ "maximum": 0.0,
+ "minimum": 0.0,
+ "total": 0.0
}
],
"metadatavalues": [
{
"name": {
- "value": "str", #
- The invariant value. Required.
+ "value": "str",
"localizedValue":
- "str" # Optional. The display name.
+ "str"
},
- "value": "str" # Optional.
- The value of the metadata.
+ "value": "str"
}
]
}
],
- "type": "str", # The resource type of the metric resource.
- Required.
- "unit": "str", # The unit of the metric. Required. Known
- values are: "Count", "Bytes", "Seconds", "CountPerSecond",
- "BytesPerSecond", "Percent", "MilliSeconds", "ByteSeconds",
- "Unspecified", "Cores", "MilliCores", "NanoCores", and "BitsPerSecond".
- "displayDescription": "str", # Optional. Detailed
- description of this metric.
- "errorCode": "str", # Optional. 'Success' or the error
- details on query failures for this metric.
- "errorMessage": "str" # Optional. Error message encountered
- querying this specific metric.
+ "type": "str",
+ "unit": "str",
+ "displayDescription": "str",
+ "errorCode": "str",
+ "errorMessage": "str"
}
],
- "cost": 0, # Optional. The integer value representing the relative cost of
- the query.
- "interval": "str", # Optional. The interval (window size) for which the
- metric data was returned in ISO 8601 duration format with a special case for
- 'FULL' value that returns single datapoint for entire time span requested (""
- *Examples: PT15M, PT1H, P1D, FULL*"" ). This may be adjusted and different from
- what was originally requested if AutoAdjustTimegrain=true is specified. This is
- not present if a metadata request was made.
- "namespace": "str", # Optional. The namespace of the metrics being queried.
- "resourceregion": "str" # Optional. The region of the resource being queried
- for metrics.
+ "cost": 0,
+ "interval": "str",
+ "namespace": "str",
+ "resourceregion": "str"
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -408,8 +361,6 @@ async def list(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- await response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
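
The filter grammar spelled out in the docstring is easiest to see at the call site. A sketch against the public sync client (names and parameters assumed from azure-monitor-query; the resource URI is a placeholder):

    from datetime import timedelta

    from azure.identity import DefaultAzureCredential
    from azure.monitor.query import MetricAggregationType, MetricsQueryClient

    client = MetricsQueryClient(DefaultAzureCredential())
    result = client.query_resource(
        "/subscriptions/.../providers/Microsoft.Storage/storageAccounts/acct1",
        metric_names=["Transactions"],
        timespan=timedelta(hours=2),
        granularity=timedelta(minutes=5),
        aggregations=[MetricAggregationType.AVERAGE],
        # Valid per the rules above: each 'or' branch stays on one metadata name.
        filter="ApiName eq 'GetBlob' or ApiName eq 'PutBlob'",
    )
    for metric in result.metrics:
        print(metric.name, metric.unit)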
@@ -459,14 +410,13 @@ def list(self, resource_uri: str, *, start_time: Optional[str] = None, **kwargs:
# response body for status code(s): 200
response == {
- "classification": "str", # Optional. Kind of namespace. Known values are:
- "Platform", "Custom", and "Qos".
- "id": "str", # Optional. The ID of the metric namespace.
- "name": "str", # Optional. The escaped name of the namespace.
+ "classification": "str",
+ "id": "str",
+ "name": "str",
"properties": {
- "metricNamespaceName": "str" # Optional. The metric namespace name.
+ "metricNamespaceName": "str"
},
- "type": "str" # Optional. The type of the namespace.
+ "type": "str"
}
"""
_headers = kwargs.pop("headers", {}) or {}
@@ -474,7 +424,7 @@ def list(self, resource_uri: str, *, start_time: Optional[str] = None, **kwargs:
cls: ClsType[JSON] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -528,8 +478,6 @@ async def get_next(next_link=None):
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- await response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
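
The namespace pager just above likewise maps to a plain iteration on the public client; a sketch under the same assumptions:

    import datetime

    from azure.identity import DefaultAzureCredential
    from azure.monitor.query import MetricsQueryClient

    client = MetricsQueryClient(DefaultAzureCredential())
    for ns in client.list_metric_namespaces(
        "/subscriptions/.../providers/Microsoft.Storage/storageAccounts/acct1",
        start_time=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
    ):
        print(ns.fully_qualified_namespace)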
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/__init__.py
index 528df49cb14c..f1f7797cdfcb 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/__init__.py
@@ -5,12 +5,18 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._client import MonitorBatchMetricsClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._client import MonitorBatchMetricsClient # type: ignore
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -18,6 +24,6 @@
__all__ = [
"MonitorBatchMetricsClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
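
The import shuffle above is the standard autorest escape hatch: _patch.py is where hand-written customizations live, the TYPE_CHECKING wildcard makes them visible to type checkers, and the runtime try/except tolerates a patch module with no exports. A sketch of what such a _patch.py can contain (everything here is hypothetical):

    from typing import List


    class ExtraHelper:
        """Hypothetical hand-written addition; it would be re-exported
        from the package __init__ via _patch_all."""


    def patch_sdk() -> None:
        """Post-import hook invoked by _patch_sdk(); usually a no-op."""


    __all__: List[str] = ["ExtraHelper"]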
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_client.py
index 3574817422ed..6d5b119dc7b9 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_client.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_client.py
@@ -8,6 +8,7 @@
from copy import deepcopy
from typing import Any
+from typing_extensions import Self
from azure.core import PipelineClient
from azure.core.pipeline import policies
@@ -18,7 +19,7 @@
from .operations import MetricsBatchOperations
-class MonitorBatchMetricsClient: # pylint: disable=client-accepts-api-version-keyword
+class MonitorBatchMetricsClient:
"""Azure Monitor Batch Metrics Python Client.
:ivar metrics_batch: MetricsBatchOperations operations
@@ -90,7 +91,7 @@ def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs:
def close(self) -> None:
self._client.close()
- def __enter__(self) -> "MonitorBatchMetricsClient":
+ def __enter__(self) -> Self:
self._client.__enter__()
return self
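
Returning Self instead of the hard-coded class name matters once clients are subclassed: the object bound by the with statement keeps the subclass type. A standalone sketch of the pattern:

    from typing_extensions import Self


    class BaseClient:
        def __enter__(self) -> Self:
            return self

        def __exit__(self, *exc_details: object) -> None:
            pass


    class CustomClient(BaseClient):
        def ping(self) -> str:
            return "pong"


    with CustomClient() as client:  # inferred as CustomClient, so .ping() type-checks
        print(client.ping())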
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_configuration.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_configuration.py
index 16ba9df5b287..87e673a3b12b 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_configuration.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_configuration.py
@@ -13,7 +13,7 @@
VERSION = "unknown"
-class MonitorBatchMetricsClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+class MonitorBatchMetricsClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for MonitorBatchMetricsClient.
Note that all parameters used to create this instance are saved as instance
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_serialization.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_serialization.py
index 2f781d740827..e2ad51869908 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_serialization.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,6 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
@@ -52,7 +52,6 @@
MutableMapping,
Type,
List,
- Mapping,
)
try:
@@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -144,6 +145,8 @@ def _json_attemp(data):
# context otherwise.
_LOGGER.critical("Wasn't XML not JSON, failing")
raise DeserializationError("XML is invalid") from err
+ elif content_type.startswith("text/"):
+ return data_as_str
raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
@classmethod
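
The dispatch order in deserialize_from_text (JSON by content type, then XML, now with a text/* passthrough) reduces to a few branches. A rough standalone rendering, simplified; the real method also handles streams and a JSON-then-XML fallback:

    import json
    import xml.etree.ElementTree as ET


    def from_text(data_as_str: str, content_type: str):
        if "json" in content_type:
            try:
                return json.loads(data_as_str)
            except ValueError as err:
                # 'from err' keeps the original traceback, as in the diff above
                raise ValueError("JSON is invalid: {}".format(err)) from err
        if "xml" in content_type:
            return ET.fromstring(data_as_str)
        if content_type.startswith("text/"):
            return data_as_str  # the newly added branch
        raise ValueError("Cannot deserialize content-type: {}".format(content_type))


    print(from_text('{"a": 1}', "application/json"))
    print(from_text("plain body", "text/plain"))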
@@ -153,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
Headers will be tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -182,15 +190,30 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+ """UTF offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)  # UTC observes no DST offset
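
For reference, the three tzinfo hooks being documented here behave like the stdlib UTC zone; a quick sketch of what each returns (the stdlib reports no DST for UTC):

    import datetime

    aware = datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc)
    print(aware.utcoffset())  # 0:00:00  <- utcoffset(dt)
    print(aware.tzname())     # UTC      <- tzname(dt)
    print(aware.dst())        # None     <- dst(dt)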
@@ -204,7 +227,7 @@ class _FixedOffset(datetime.tzinfo): # type: ignore
:param datetime.timedelta offset: offset in timedelta format
"""
- def __init__(self, offset):
+ def __init__(self, offset) -> None:
self.__offset = offset
def utcoffset(self, dt):
@@ -233,24 +256,26 @@ def __getinitargs__(self):
_FLATTEN = re.compile(r"(?<!\\)\.")
def __init__(self, **kwargs: Any) -> None:
self.additional_properties: Optional[Dict[str, Any]] = {}
- for k in kwargs:
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -298,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
setattr(self, k, kwargs[k])
def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
def __str__(self) -> str:
@@ -324,7 +366,11 @@ def is_xml_model(cls) -> bool:
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
xml_map = cls._xml_map # type: ignore
except AttributeError:
@@ -344,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
def as_dict(
self,
@@ -378,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
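
The key_transformer parameter documented above is just a callable over (key, attr_desc, value) that returns the output key. A hypothetical transformer and call:

    def upper_rest_key_transformer(key, attr_desc, value):
        # key: python attribute name; attr_desc: its _attribute_map entry;
        # value: the already-serialized value. Return the dict key to emit.
        return attr_desc["key"].upper()

    # usage on any generated model instance (model name hypothetical):
    # payload = model.as_dict(key_transformer=upper_rest_key_transformer)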
@@ -393,7 +444,7 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@@ -406,6 +457,7 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@@ -424,9 +476,11 @@ def from_dict(
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
deserializer.key_extractors = ( # type: ignore
@@ -446,21 +500,25 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
- Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
if not isinstance(response, ET.Element):
rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+ subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
else:
subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
if subtype_value:
@@ -499,11 +557,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer(object): # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -538,7 +598,7 @@ class Serializer(object):
"multiple": lambda x, y: x % y != 0,
}
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.serialize_type = {
"iso-8601": Serializer.serialize_iso,
"rfc-1123": Serializer.serialize_rfc,
@@ -558,13 +618,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None):
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -590,12 +653,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -631,7 +696,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if isinstance(new_attr, list):
serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
@@ -662,17 +728,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
raise SerializationError(msg) from err
- else:
- return serialized
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
@@ -701,7 +767,7 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
raise SerializationError("Unable to build a model: " + str(err)) from err
@@ -710,9 +776,11 @@ def body(self, data, data_type, **kwargs):
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -726,21 +794,20 @@ def url(self, name, data, data_type, **kwargs):
output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :keyword bool skip_quote: Whether to skip quote the serialized result.
- Defaults to False.
:rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
@@ -757,19 +824,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -778,21 +846,20 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
@@ -803,7 +870,7 @@ def serialize_data(self, data, data_type, **kwargs):
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -819,11 +886,10 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
raise SerializationError(msg.format(data, data_type)) from err
- else:
- return self._serialize(data, **kwargs)
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -839,23 +905,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+ :param obj data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -869,8 +938,7 @@ def serialize_unicode(cls, data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -880,15 +948,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
- :keyword bool do_quote: Whether to quote the serialized result of each iterable element.
- Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -943,9 +1009,8 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
@@ -969,7 +1034,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -977,6 +1042,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -1001,7 +1067,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1032,56 +1098,61 @@ def serialize_enum(attr, enum_obj=None):
try:
enum_obj(result) # type: ignore
return result
- except ValueError:
+ except ValueError as exc:
for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+ :param decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1089,11 +1160,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1103,30 +1175,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1139,12 +1213,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1170,13 +1245,14 @@ def serialize_iso(attr, **kwargs):
raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+ :return: serialized unix
"""
if isinstance(attr, int):
return attr
@@ -1184,11 +1260,11 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
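
serialize_unix above is a thin wrapper over calendar.timegm, which reads the time tuple as UTC; the round trip with its deserializing counterpart looks like this sketch:

    import calendar
    import datetime

    dt = datetime.datetime(2024, 5, 1, 12, 0, 0, tzinfo=datetime.timezone.utc)
    seconds = int(calendar.timegm(dt.utctimetuple()))  # serialize_unix path
    restored = datetime.datetime.fromtimestamp(seconds, tz=datetime.timezone.utc)
    assert restored == dt
    print(seconds)  # 1714564800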
@@ -1209,7 +1285,9 @@ def rest_key_extractor(attr, attr_desc, data):
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1230,17 +1308,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1277,7 +1367,7 @@ def _extract_name_from_internal_type(internal_type):
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1329,22 +1419,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1361,9 +1450,9 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.deserialize_type = {
"iso-8601": Deserializer.deserialize_iso,
"rfc-1123": Deserializer.deserialize_rfc,
@@ -1401,11 +1490,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1414,12 +1504,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1438,13 +1529,13 @@ def _deserialize(self, target_obj, data):
if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
- if data is None:
+ if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map # type: ignore
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1474,9 +1565,8 @@ def _deserialize(self, target_obj, data):
except (AttributeError, TypeError, KeyError) as err:
msg = "Unable to deserialize to object: " + class_name # type: ignore
raise DeserializationError(msg) from err
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1503,6 +1593,8 @@ def _classify_target(self, target, data):
:param str target: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
@@ -1514,7 +1606,7 @@ def _classify_target(self, target, data):
return target, target
try:
- target = target._classify(data, self.dependencies) # type: ignore
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
return target, target.__class__.__name__ # type: ignore
@@ -1529,10 +1621,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
:param str target_obj: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -1550,10 +1644,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1577,24 +1673,35 @@ def _unpack_content(raw_data, content_type=None):
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("readonly")
+ ]
+ const = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("constant")
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
setattr(response_obj, attr, attrs.get(attr))
if additional_properties:
- response_obj.additional_properties = additional_properties
+ response_obj.additional_properties = additional_properties # type: ignore
return response_obj
except TypeError as err:
msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err))
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1603,15 +1710,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1625,7 +1733,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1645,14 +1757,14 @@ def deserialize_data(self, data, data_type):
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
raise DeserializationError(msg) from err
- else:
- return self._deserialize(obj_type, data)
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1669,6 +1781,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1679,11 +1792,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1718,11 +1832,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1730,6 +1843,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1741,24 +1855,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, str):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
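
deserialize_basic above widens what counts as a wire boolean; the accepted spellings reduce to this check (sketch):

    def to_bool(attr):
        if attr in [True, False, 1, 0]:
            return bool(attr)
        if isinstance(attr, str):
            if attr.lower() in ["true", "1"]:
                return True
            if attr.lower() in ["false", "0"]:
                return False
        raise TypeError("Invalid boolean value: {}".format(attr))


    assert to_bool("True") is True and to_bool(0) is False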
@@ -1766,6 +1879,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1779,8 +1893,7 @@ def deserialize_unicode(data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1792,6 +1905,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1802,9 +1916,9 @@ def deserialize_enum(data, enum_obj):
# Workaround. We might consider remove it in the future.
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
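
The index fallback kept above is a legacy workaround: an integer wire value is treated as a position in the enum's member list before value matching is retried. Sketched with a stdlib enum:

    from enum import Enum


    class Color(Enum):
        RED = "red"
        BLUE = "blue"


    print(Color("red"))                         # by value -> Color.RED
    print(list(Color.__members__.values())[1])  # by index -> Color.BLUE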
@@ -1820,6 +1934,7 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1832,6 +1947,7 @@ def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1847,8 +1963,9 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal
:raises: DeserializationError if string format invalid.
+ :rtype: decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
@@ -1863,6 +1980,7 @@ def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int
:rtype: long or int
:raises: ValueError if string format invalid.
"""
@@ -1875,6 +1993,7 @@ def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1885,14 +2004,14 @@ def deserialize_duration(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
raise DeserializationError(msg) from err
- else:
- return duration
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
@@ -1908,6 +2027,7 @@ def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
@@ -1922,6 +2042,7 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1937,14 +2058,14 @@ def deserialize_rfc(attr):
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1974,8 +2095,7 @@ def deserialize_iso(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1983,6 +2103,7 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be serialized.
+ :return: Deserialized datetime
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
@@ -1994,5 +2115,4 @@ def deserialize_unix(attr):
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/__init__.py
index 528df49cb14c..f1f7797cdfcb 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/__init__.py
@@ -5,12 +5,18 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._client import MonitorBatchMetricsClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._client import MonitorBatchMetricsClient # type: ignore
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -18,6 +24,6 @@
__all__ = [
"MonitorBatchMetricsClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
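This regenerated `__init__.py` pattern repeats across the packages below: the wildcard `_patch` import moves under `TYPE_CHECKING` so type checkers see handwritten overrides, while the runtime `try`/`except ImportError` tolerates a `_patch.py` that predates the `__all__` convention. For reference, a minimal sketch of the `_patch.py` hook these modules expect, modeled on the azure-sdk-for-python customization template:

# _patch.py -- minimal sketch of the customization hook the generated
# __init__.py imports from (modeled on the repo's template, not copied
# from any specific package).
from typing import List

# Names listed here are re-exported by the generated package __init__.
__all__: List[str] = []

def patch_sdk():
    """Customize the generated code after import.

    The generated __init__.py calls this unconditionally, so handwritten
    tweaks (extra methods, renamed operations) survive regeneration.
    """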
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_client.py
index eb91181245ef..90b421c008ce 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_client.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_client.py
@@ -8,6 +8,7 @@
from copy import deepcopy
from typing import Any, Awaitable
+from typing_extensions import Self
from azure.core import AsyncPipelineClient
from azure.core.pipeline import policies
@@ -18,7 +19,7 @@
from .operations import MetricsBatchOperations
-class MonitorBatchMetricsClient: # pylint: disable=client-accepts-api-version-keyword
+class MonitorBatchMetricsClient:
"""Azure Monitor Batch Metrics Python Client.
:ivar metrics_batch: MetricsBatchOperations operations
@@ -92,7 +93,7 @@ def send_request(
async def close(self) -> None:
await self._client.close()
- async def __aenter__(self) -> "MonitorBatchMetricsClient":
+ async def __aenter__(self) -> Self:
await self._client.__aenter__()
return self
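Swapping the quoted class name for `typing_extensions.Self` is more than cosmetic: a subclass that inherits `__aenter__` now types `async with` as the subclass rather than the base. A small illustration (the classes here are hypothetical, not SDK types; requires the `typing_extensions` package):

import asyncio
from typing_extensions import Self

class BaseClient:
    async def __aenter__(self) -> Self:
        return self

    async def __aexit__(self, *exc_details) -> None:
        return None

class CustomClient(BaseClient):
    def ping(self) -> str:
        return "pong"

async def main() -> None:
    async with CustomClient() as client:
        # With `-> Self`, type checkers know `client` is CustomClient here;
        # with `-> "BaseClient"`, `client.ping()` would be flagged.
        print(client.ping())

asyncio.run(main())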
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_configuration.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_configuration.py
index 579e30041e20..edd48bcec8b1 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_configuration.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_configuration.py
@@ -13,7 +13,7 @@
VERSION = "unknown"
-class MonitorBatchMetricsClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+class MonitorBatchMetricsClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for MonitorBatchMetricsClient.
Note that all parameters used to create this instance are saved as instance
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/__init__.py
index 0681a236948f..3a8ca7f7491d 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/__init__.py
@@ -5,15 +5,21 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._operations import MetricsBatchOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._operations import MetricsBatchOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"MetricsBatchOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/_operations.py
index 6e1ceb94248a..ac2d727ced4d 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/_operations.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -9,7 +8,7 @@
import datetime
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, List, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -29,7 +28,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -71,7 +70,6 @@ async def batch(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Lists the metric values for multiple resources.
:param subscription_id: The subscription identifier for the resources in this batch. Required.
@@ -109,13 +107,13 @@ async def batch(
:paramtype orderby: str
:keyword filter: The filter is used to reduce the set of metric data
returned.:code:`<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`-
- Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\ **filter=A eq ‘a1’ and B
- eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\ :code:`<br>`- Invalid variant::code:`<br>`\ **filter=A eq
- ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\ :code:`<br>`This is invalid because the logical
- or operator cannot separate two different metadata names.:code:`<br>`- Return all time series
- where A = a1, B = b1 and C = c1::code:`<br>`\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq ‘c1’**\
- :code:`<br>`- Return all time series where A = a1:code:`<br>`\ **filter=A eq ‘a1’ and B eq ‘\
- *’ and C eq ‘*\ ’**. Default value is None.
+ Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **filter=A eq ‘a1’ and
+ B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\ **filter=A
+ eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid because the
+ logical or operator cannot separate two different metadata names.:code:`<br>`- Return all time
+ series where A = a1, B = b1 and C = c1::code:`<br>`\\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq
+ ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>`\\ **filter=A eq ‘a1’ and
+ B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None.
:paramtype filter: str
:keyword rollupby: Dimension name(s) to rollup results by. For example if you only want to see
metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see
@@ -135,7 +133,7 @@ async def batch(
# JSON input template you can fill out and use as your body input.
batch_request = {
"resourceids": [
- "str" # Optional. The list of resource IDs to query metrics for.
+ "str"
]
}
@@ -143,85 +141,54 @@ async def batch(
response == {
"values": [
{
- "endtime": "str", # The end time, in datetime format, for
- which the data was retrieved. Required.
- "starttime": "str", # The start time, in datetime format,
- for which the data was retrieved. Required.
+ "endtime": "str",
+ "starttime": "str",
"value": [
{
- "id": "str", # The metric Id. Required.
+ "id": "str",
"name": {
- "value": "str", # The invariant
- value. Required.
- "localizedValue": "str" # Optional.
- The display name.
+ "value": "str",
+ "localizedValue": "str"
},
"timeseries": [
{
"data": [
{
"timeStamp":
- "2020-02-20 00:00:00", # The timestamp for the
- metric value in ISO 8601 format. Required.
+ "2020-02-20 00:00:00",
"average":
- 0.0, # Optional. The average value in the time
- range.
+ 0.0,
"count": 0.0,
- # Optional. The number of samples in the time
- range. Can be used to determine the number of
- values that contributed to the average value.
"maximum":
- 0.0, # Optional. The greatest value in the time
- range.
+ 0.0,
"minimum":
- 0.0, # Optional. The least value in the time
- range.
+ 0.0,
"total": 0.0
- # Optional. The sum of all of the values in the
- time range.
}
],
"metadatavalues": [
{
"name": {
- "value": "str", # The invariant value.
- Required.
- "localizedValue": "str" # Optional. The
- display name.
+ "value": "str",
+ "localizedValue": "str"
},
"value":
- "str" # Optional. The value of the metadata.
+ "str"
}
]
}
],
- "type": "str", # The resource type of the
- metric resource. Required.
- "unit": "str", # The unit of the metric.
- Required. Known values are: "Count", "Bytes", "Seconds",
- "CountPerSecond", "BytesPerSecond", "Percent", "MilliSeconds",
- "ByteSeconds", "Unspecified", "Cores", "MilliCores", "NanoCores",
- and "BitsPerSecond".
- "displayDescription": "str", # Optional.
- Detailed description of this metric.
- "errorCode": "str", # Optional. 'Success' or
- the error details on query failures for this metric.
- "errorMessage": "str" # Optional. Error
- message encountered querying this specific metric.
+ "type": "str",
+ "unit": "str",
+ "displayDescription": "str",
+ "errorCode": "str",
+ "errorMessage": "str"
}
],
- "interval": "str", # Optional. The interval (window size)
- for which the metric data was returned in ISO 8601 duration format with a
- special case for 'FULL' value that returns single datapoint for entire
- time span requested ("" *Examples: PT15M, PT1H, P1D, FULL*"" ). This may
- be adjusted and different from what was originally requested if
- AutoAdjustTimegrain=true is specified.
- "namespace": "str", # Optional. The namespace of the metrics
- been queried.
- "resourceid": "str", # Optional. The resource that has been
- queried for metrics.
- "resourceregion": "str" # Optional. The region of the
- resource been queried for metrics.
+ "interval": "str",
+ "namespace": "str",
+ "resourceid": "str",
+ "resourceregion": "str"
}
]
}
@@ -246,7 +213,6 @@ async def batch(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Lists the metric values for multiple resources.
:param subscription_id: The subscription identifier for the resources in this batch. Required.
@@ -284,13 +250,13 @@ async def batch(
:paramtype orderby: str
:keyword filter: The filter is used to reduce the set of metric data
returned.:code:`<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`-
- Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\ **filter=A eq ‘a1’ and B
- eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\ :code:`<br>`- Invalid variant::code:`<br>`\ **filter=A eq
- ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\ :code:`<br>`This is invalid because the logical
- or operator cannot separate two different metadata names.:code:`<br>`- Return all time series
- where A = a1, B = b1 and C = c1::code:`<br>`\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq ‘c1’**\
- :code:`<br>`- Return all time series where A = a1:code:`<br>`\ **filter=A eq ‘a1’ and B eq ‘\
- *’ and C eq ‘*\ ’**. Default value is None.
+ Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **filter=A eq ‘a1’ and
+ B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\ **filter=A
+ eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid because the
+ logical or operator cannot separate two different metadata names.:code:`<br>`- Return all time
+ series where A = a1, B = b1 and C = c1::code:`<br>`\\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq
+ ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>`\\ **filter=A eq ‘a1’ and
+ B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None.
:paramtype filter: str
:keyword rollupby: Dimension name(s) to rollup results by. For example if you only want to see
metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see
@@ -311,85 +277,54 @@ async def batch(
response == {
"values": [
{
- "endtime": "str", # The end time, in datetime format, for
- which the data was retrieved. Required.
- "starttime": "str", # The start time, in datetime format,
- for which the data was retrieved. Required.
+ "endtime": "str",
+ "starttime": "str",
"value": [
{
- "id": "str", # The metric Id. Required.
+ "id": "str",
"name": {
- "value": "str", # The invariant
- value. Required.
- "localizedValue": "str" # Optional.
- The display name.
+ "value": "str",
+ "localizedValue": "str"
},
"timeseries": [
{
"data": [
{
"timeStamp":
- "2020-02-20 00:00:00", # The timestamp for the
- metric value in ISO 8601 format. Required.
+ "2020-02-20 00:00:00",
"average":
- 0.0, # Optional. The average value in the time
- range.
+ 0.0,
"count": 0.0,
- # Optional. The number of samples in the time
- range. Can be used to determine the number of
- values that contributed to the average value.
"maximum":
- 0.0, # Optional. The greatest value in the time
- range.
+ 0.0,
"minimum":
- 0.0, # Optional. The least value in the time
- range.
+ 0.0,
"total": 0.0
- # Optional. The sum of all of the values in the
- time range.
}
],
"metadatavalues": [
{
"name": {
- "value": "str", # The invariant value.
- Required.
- "localizedValue": "str" # Optional. The
- display name.
+ "value": "str",
+ "localizedValue": "str"
},
"value":
- "str" # Optional. The value of the metadata.
+ "str"
}
]
}
],
- "type": "str", # The resource type of the
- metric resource. Required.
- "unit": "str", # The unit of the metric.
- Required. Known values are: "Count", "Bytes", "Seconds",
- "CountPerSecond", "BytesPerSecond", "Percent", "MilliSeconds",
- "ByteSeconds", "Unspecified", "Cores", "MilliCores", "NanoCores",
- and "BitsPerSecond".
- "displayDescription": "str", # Optional.
- Detailed description of this metric.
- "errorCode": "str", # Optional. 'Success' or
- the error details on query failures for this metric.
- "errorMessage": "str" # Optional. Error
- message encountered querying this specific metric.
+ "type": "str",
+ "unit": "str",
+ "displayDescription": "str",
+ "errorCode": "str",
+ "errorMessage": "str"
}
],
- "interval": "str", # Optional. The interval (window size)
- for which the metric data was returned in ISO 8601 duration format with a
- special case for 'FULL' value that returns single datapoint for entire
- time span requested ("" *Examples: PT15M, PT1H, P1D, FULL*"" ). This may
- be adjusted and different from what was originally requested if
- AutoAdjustTimegrain=true is specified.
- "namespace": "str", # Optional. The namespace of the metrics
- been queried.
- "resourceid": "str", # Optional. The resource that has been
- queried for metrics.
- "resourceregion": "str" # Optional. The region of the
- resource been queried for metrics.
+ "interval": "str",
+ "namespace": "str",
+ "resourceid": "str",
+ "resourceregion": "str"
}
]
}
@@ -413,7 +348,6 @@ async def batch(
rollupby: Optional[str] = None,
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Lists the metric values for multiple resources.
:param subscription_id: The subscription identifier for the resources in this batch. Required.
@@ -452,13 +386,13 @@ async def batch(
:paramtype orderby: str
:keyword filter: The filter is used to reduce the set of metric data
returned.:code:`<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`-
- Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\ **filter=A eq ‘a1’ and B
- eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\ :code:`<br>`- Invalid variant::code:`<br>`\ **filter=A eq
- ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\ :code:`<br>`This is invalid because the logical
- or operator cannot separate two different metadata names.:code:`<br>`- Return all time series
- where A = a1, B = b1 and C = c1::code:`<br>`\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq ‘c1’**\
- :code:`<br>`- Return all time series where A = a1:code:`<br>`\ **filter=A eq ‘a1’ and B eq ‘\
- *’ and C eq ‘*\ ’**. Default value is None.
+ Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **filter=A eq ‘a1’ and
+ B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\ **filter=A
+ eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid because the
+ logical or operator cannot separate two different metadata names.:code:`<br>`- Return all time
+ series where A = a1, B = b1 and C = c1::code:`<br>`\\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq
+ ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>`\\ **filter=A eq ‘a1’ and
+ B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None.
:paramtype filter: str
:keyword rollupby: Dimension name(s) to rollup results by. For example if you only want to see
metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see
@@ -475,7 +409,7 @@ async def batch(
# JSON input template you can fill out and use as your body input.
batch_request = {
"resourceids": [
- "str" # Optional. The list of resource IDs to query metrics for.
+ "str"
]
}
@@ -483,90 +417,59 @@ async def batch(
response == {
"values": [
{
- "endtime": "str", # The end time, in datetime format, for
- which the data was retrieved. Required.
- "starttime": "str", # The start time, in datetime format,
- for which the data was retrieved. Required.
+ "endtime": "str",
+ "starttime": "str",
"value": [
{
- "id": "str", # The metric Id. Required.
+ "id": "str",
"name": {
- "value": "str", # The invariant
- value. Required.
- "localizedValue": "str" # Optional.
- The display name.
+ "value": "str",
+ "localizedValue": "str"
},
"timeseries": [
{
"data": [
{
"timeStamp":
- "2020-02-20 00:00:00", # The timestamp for the
- metric value in ISO 8601 format. Required.
+ "2020-02-20 00:00:00",
"average":
- 0.0, # Optional. The average value in the time
- range.
+ 0.0,
"count": 0.0,
- # Optional. The number of samples in the time
- range. Can be used to determine the number of
- values that contributed to the average value.
"maximum":
- 0.0, # Optional. The greatest value in the time
- range.
+ 0.0,
"minimum":
- 0.0, # Optional. The least value in the time
- range.
+ 0.0,
"total": 0.0
- # Optional. The sum of all of the values in the
- time range.
}
],
"metadatavalues": [
{
"name": {
- "value": "str", # The invariant value.
- Required.
- "localizedValue": "str" # Optional. The
- display name.
+ "value": "str",
+ "localizedValue": "str"
},
"value":
- "str" # Optional. The value of the metadata.
+ "str"
}
]
}
],
- "type": "str", # The resource type of the
- metric resource. Required.
- "unit": "str", # The unit of the metric.
- Required. Known values are: "Count", "Bytes", "Seconds",
- "CountPerSecond", "BytesPerSecond", "Percent", "MilliSeconds",
- "ByteSeconds", "Unspecified", "Cores", "MilliCores", "NanoCores",
- and "BitsPerSecond".
- "displayDescription": "str", # Optional.
- Detailed description of this metric.
- "errorCode": "str", # Optional. 'Success' or
- the error details on query failures for this metric.
- "errorMessage": "str" # Optional. Error
- message encountered querying this specific metric.
+ "type": "str",
+ "unit": "str",
+ "displayDescription": "str",
+ "errorCode": "str",
+ "errorMessage": "str"
}
],
- "interval": "str", # Optional. The interval (window size)
- for which the metric data was returned in ISO 8601 duration format with a
- special case for 'FULL' value that returns single datapoint for entire
- time span requested ("" *Examples: PT15M, PT1H, P1D, FULL*"" ). This may
- be adjusted and different from what was originally requested if
- AutoAdjustTimegrain=true is specified.
- "namespace": "str", # Optional. The namespace of the metrics
- been queried.
- "resourceid": "str", # Optional. The resource that has been
- queried for metrics.
- "resourceregion": "str" # Optional. The region of the
- resource been queried for metrics.
+ "interval": "str",
+ "namespace": "str",
+ "resourceid": "str",
+ "resourceregion": "str"
}
]
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -620,8 +523,6 @@ async def batch(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- await response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
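The re-escaped `filter` docstrings above encode a grammar rule that is easy to trip over: `or` may only join values of the same metadata name. A hypothetical helper (not part of the SDK) that builds strings obeying that rule:

# Hypothetical helper illustrating the filter grammar from the docstrings
# above: `or` combines values within one metadata name, `and` joins names.
def metadata_filter(**dimensions):
    """Build a filter string like "A eq 'a1' and B eq 'b1' or B eq 'b2'"."""
    clauses = []
    for name, values in dimensions.items():
        if isinstance(values, str):
            values = [values]
        clauses.append(" or ".join(f"{name} eq '{v}'" for v in values))
    return " and ".join(clauses)

# Valid per the docs: all time series of C where A = a1 and B = b1 or b2
# (straight quotes here; the docstrings show typographic quotes).
print(metadata_filter(A="a1", B=["b1", "b2"], C="*"))
# -> "A eq 'a1' and B eq 'b1' or B eq 'b2' and C eq '*'"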
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/__init__.py
index 0681a236948f..3a8ca7f7491d 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/__init__.py
@@ -5,15 +5,21 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._operations import MetricsBatchOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._operations import MetricsBatchOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"MetricsBatchOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/_operations.py
index 36ceaa06595c..e4cd867f64fa 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/_operations.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -9,7 +8,7 @@
import datetime
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, List, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -29,7 +28,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -133,7 +132,6 @@ def batch(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Lists the metric values for multiple resources.
:param subscription_id: The subscription identifier for the resources in this batch. Required.
@@ -171,13 +169,13 @@ def batch(
:paramtype orderby: str
:keyword filter: The filter is used to reduce the set of metric data
returned.:code:`<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`-
- Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\ **filter=A eq ‘a1’ and B
- eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\ :code:`<br>`- Invalid variant::code:`<br>`\ **filter=A eq
- ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\ :code:`<br>`This is invalid because the logical
- or operator cannot separate two different metadata names.:code:`<br>`- Return all time series
- where A = a1, B = b1 and C = c1::code:`<br>`\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq ‘c1’**\
- :code:`<br>`- Return all time series where A = a1:code:`<br>`\ **filter=A eq ‘a1’ and B eq ‘\
- *’ and C eq ‘*\ ’**. Default value is None.
+ Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **filter=A eq ‘a1’ and
+ B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\ **filter=A
+ eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid because the
+ logical or operator cannot separate two different metadata names.:code:`<br>`- Return all time
+ series where A = a1, B = b1 and C = c1::code:`<br>`\\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq
+ ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>`\\ **filter=A eq ‘a1’ and
+ B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None.
:paramtype filter: str
:keyword rollupby: Dimension name(s) to rollup results by. For example if you only want to see
metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see
@@ -197,7 +195,7 @@ def batch(
# JSON input template you can fill out and use as your body input.
batch_request = {
"resourceids": [
- "str" # Optional. The list of resource IDs to query metrics for.
+ "str"
]
}
@@ -205,85 +203,54 @@ def batch(
response == {
"values": [
{
- "endtime": "str", # The end time, in datetime format, for
- which the data was retrieved. Required.
- "starttime": "str", # The start time, in datetime format,
- for which the data was retrieved. Required.
+ "endtime": "str",
+ "starttime": "str",
"value": [
{
- "id": "str", # The metric Id. Required.
+ "id": "str",
"name": {
- "value": "str", # The invariant
- value. Required.
- "localizedValue": "str" # Optional.
- The display name.
+ "value": "str",
+ "localizedValue": "str"
},
"timeseries": [
{
"data": [
{
"timeStamp":
- "2020-02-20 00:00:00", # The timestamp for the
- metric value in ISO 8601 format. Required.
+ "2020-02-20 00:00:00",
"average":
- 0.0, # Optional. The average value in the time
- range.
+ 0.0,
"count": 0.0,
- # Optional. The number of samples in the time
- range. Can be used to determine the number of
- values that contributed to the average value.
"maximum":
- 0.0, # Optional. The greatest value in the time
- range.
+ 0.0,
"minimum":
- 0.0, # Optional. The least value in the time
- range.
+ 0.0,
"total": 0.0
- # Optional. The sum of all of the values in the
- time range.
}
],
"metadatavalues": [
{
"name": {
- "value": "str", # The invariant value.
- Required.
- "localizedValue": "str" # Optional. The
- display name.
+ "value": "str",
+ "localizedValue": "str"
},
"value":
- "str" # Optional. The value of the metadata.
+ "str"
}
]
}
],
- "type": "str", # The resource type of the
- metric resource. Required.
- "unit": "str", # The unit of the metric.
- Required. Known values are: "Count", "Bytes", "Seconds",
- "CountPerSecond", "BytesPerSecond", "Percent", "MilliSeconds",
- "ByteSeconds", "Unspecified", "Cores", "MilliCores", "NanoCores",
- and "BitsPerSecond".
- "displayDescription": "str", # Optional.
- Detailed description of this metric.
- "errorCode": "str", # Optional. 'Success' or
- the error details on query failures for this metric.
- "errorMessage": "str" # Optional. Error
- message encountered querying this specific metric.
+ "type": "str",
+ "unit": "str",
+ "displayDescription": "str",
+ "errorCode": "str",
+ "errorMessage": "str"
}
],
- "interval": "str", # Optional. The interval (window size)
- for which the metric data was returned in ISO 8601 duration format with a
- special case for 'FULL' value that returns single datapoint for entire
- time span requested ("" *Examples: PT15M, PT1H, P1D, FULL*"" ). This may
- be adjusted and different from what was originally requested if
- AutoAdjustTimegrain=true is specified.
- "namespace": "str", # Optional. The namespace of the metrics
- been queried.
- "resourceid": "str", # Optional. The resource that has been
- queried for metrics.
- "resourceregion": "str" # Optional. The region of the
- resource been queried for metrics.
+ "interval": "str",
+ "namespace": "str",
+ "resourceid": "str",
+ "resourceregion": "str"
}
]
}
@@ -308,7 +275,6 @@ def batch(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Lists the metric values for multiple resources.
:param subscription_id: The subscription identifier for the resources in this batch. Required.
@@ -346,13 +312,13 @@ def batch(
:paramtype orderby: str
:keyword filter: The filter is used to reduce the set of metric data
returned.:code:`<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`-
- Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\ **filter=A eq ‘a1’ and B
- eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\ :code:`<br>`- Invalid variant::code:`<br>`\ **filter=A eq
- ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\ :code:`<br>`This is invalid because the logical
- or operator cannot separate two different metadata names.:code:`<br>`- Return all time series
- where A = a1, B = b1 and C = c1::code:`<br>`\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq ‘c1’**\
- :code:`<br>`- Return all time series where A = a1:code:`<br>`\ **filter=A eq ‘a1’ and B eq ‘\
- *’ and C eq ‘*\ ’**. Default value is None.
+ Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **filter=A eq ‘a1’ and
+ B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\ **filter=A
+ eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid because the
+ logical or operator cannot separate two different metadata names.:code:`<br>`- Return all time
+ series where A = a1, B = b1 and C = c1::code:`<br>`\\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq
+ ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>`\\ **filter=A eq ‘a1’ and
+ B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None.
:paramtype filter: str
:keyword rollupby: Dimension name(s) to rollup results by. For example if you only want to see
metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see
@@ -373,85 +339,54 @@ def batch(
response == {
"values": [
{
- "endtime": "str", # The end time, in datetime format, for
- which the data was retrieved. Required.
- "starttime": "str", # The start time, in datetime format,
- for which the data was retrieved. Required.
+ "endtime": "str",
+ "starttime": "str",
"value": [
{
- "id": "str", # The metric Id. Required.
+ "id": "str",
"name": {
- "value": "str", # The invariant
- value. Required.
- "localizedValue": "str" # Optional.
- The display name.
+ "value": "str",
+ "localizedValue": "str"
},
"timeseries": [
{
"data": [
{
"timeStamp":
- "2020-02-20 00:00:00", # The timestamp for the
- metric value in ISO 8601 format. Required.
+ "2020-02-20 00:00:00",
"average":
- 0.0, # Optional. The average value in the time
- range.
+ 0.0,
"count": 0.0,
- # Optional. The number of samples in the time
- range. Can be used to determine the number of
- values that contributed to the average value.
"maximum":
- 0.0, # Optional. The greatest value in the time
- range.
+ 0.0,
"minimum":
- 0.0, # Optional. The least value in the time
- range.
+ 0.0,
"total": 0.0
- # Optional. The sum of all of the values in the
- time range.
}
],
"metadatavalues": [
{
"name": {
- "value": "str", # The invariant value.
- Required.
- "localizedValue": "str" # Optional. The
- display name.
+ "value": "str",
+ "localizedValue": "str"
},
"value":
- "str" # Optional. The value of the metadata.
+ "str"
}
]
}
],
- "type": "str", # The resource type of the
- metric resource. Required.
- "unit": "str", # The unit of the metric.
- Required. Known values are: "Count", "Bytes", "Seconds",
- "CountPerSecond", "BytesPerSecond", "Percent", "MilliSeconds",
- "ByteSeconds", "Unspecified", "Cores", "MilliCores", "NanoCores",
- and "BitsPerSecond".
- "displayDescription": "str", # Optional.
- Detailed description of this metric.
- "errorCode": "str", # Optional. 'Success' or
- the error details on query failures for this metric.
- "errorMessage": "str" # Optional. Error
- message encountered querying this specific metric.
+ "type": "str",
+ "unit": "str",
+ "displayDescription": "str",
+ "errorCode": "str",
+ "errorMessage": "str"
}
],
- "interval": "str", # Optional. The interval (window size)
- for which the metric data was returned in ISO 8601 duration format with a
- special case for 'FULL' value that returns single datapoint for entire
- time span requested ("" *Examples: PT15M, PT1H, P1D, FULL*"" ). This may
- be adjusted and different from what was originally requested if
- AutoAdjustTimegrain=true is specified.
- "namespace": "str", # Optional. The namespace of the metrics
- been queried.
- "resourceid": "str", # Optional. The resource that has been
- queried for metrics.
- "resourceregion": "str" # Optional. The region of the
- resource been queried for metrics.
+ "interval": "str",
+ "namespace": "str",
+ "resourceid": "str",
+ "resourceregion": "str"
}
]
}
@@ -475,7 +410,6 @@ def batch(
rollupby: Optional[str] = None,
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Lists the metric values for multiple resources.
:param subscription_id: The subscription identifier for the resources in this batch. Required.
@@ -514,13 +448,13 @@ def batch(
:paramtype orderby: str
:keyword filter: The filter is used to reduce the set of metric data
returned.:code:`<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`-
- Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\ **filter=A eq ‘a1’ and B
- eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\ :code:`<br>`- Invalid variant::code:`<br>`\ **filter=A eq
- ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\ :code:`<br>`This is invalid because the logical
- or operator cannot separate two different metadata names.:code:`<br>`- Return all time series
- where A = a1, B = b1 and C = c1::code:`<br>`\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq ‘c1’**\
- :code:`<br>`- Return all time series where A = a1:code:`<br>`\ **filter=A eq ‘a1’ and B eq ‘\
- *’ and C eq ‘*\ ’**. Default value is None.
+ Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **filter=A eq ‘a1’ and
+ B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\ **filter=A
+ eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid because the
+ logical or operator cannot separate two different metadata names.:code:`<br>`- Return all time
+ series where A = a1, B = b1 and C = c1::code:`<br>`\\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq
+ ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>`\\ **filter=A eq ‘a1’ and
+ B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None.
:paramtype filter: str
:keyword rollupby: Dimension name(s) to rollup results by. For example if you only want to see
metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see
@@ -537,7 +471,7 @@ def batch(
# JSON input template you can fill out and use as your body input.
batch_request = {
"resourceids": [
- "str" # Optional. The list of resource IDs to query metrics for.
+ "str"
]
}
@@ -545,90 +479,59 @@ def batch(
response == {
"values": [
{
- "endtime": "str", # The end time, in datetime format, for
- which the data was retrieved. Required.
- "starttime": "str", # The start time, in datetime format,
- for which the data was retrieved. Required.
+ "endtime": "str",
+ "starttime": "str",
"value": [
{
- "id": "str", # The metric Id. Required.
+ "id": "str",
"name": {
- "value": "str", # The invariant
- value. Required.
- "localizedValue": "str" # Optional.
- The display name.
+ "value": "str",
+ "localizedValue": "str"
},
"timeseries": [
{
"data": [
{
"timeStamp":
- "2020-02-20 00:00:00", # The timestamp for the
- metric value in ISO 8601 format. Required.
+ "2020-02-20 00:00:00",
"average":
- 0.0, # Optional. The average value in the time
- range.
+ 0.0,
"count": 0.0,
- # Optional. The number of samples in the time
- range. Can be used to determine the number of
- values that contributed to the average value.
"maximum":
- 0.0, # Optional. The greatest value in the time
- range.
+ 0.0,
"minimum":
- 0.0, # Optional. The least value in the time
- range.
+ 0.0,
"total": 0.0
- # Optional. The sum of all of the values in the
- time range.
}
],
"metadatavalues": [
{
"name": {
- "value": "str", # The invariant value.
- Required.
- "localizedValue": "str" # Optional. The
- display name.
+ "value": "str",
+ "localizedValue": "str"
},
"value":
- "str" # Optional. The value of the metadata.
+ "str"
}
]
}
],
- "type": "str", # The resource type of the
- metric resource. Required.
- "unit": "str", # The unit of the metric.
- Required. Known values are: "Count", "Bytes", "Seconds",
- "CountPerSecond", "BytesPerSecond", "Percent", "MilliSeconds",
- "ByteSeconds", "Unspecified", "Cores", "MilliCores", "NanoCores",
- and "BitsPerSecond".
- "displayDescription": "str", # Optional.
- Detailed description of this metric.
- "errorCode": "str", # Optional. 'Success' or
- the error details on query failures for this metric.
- "errorMessage": "str" # Optional. Error
- message encountered querying this specific metric.
+ "type": "str",
+ "unit": "str",
+ "displayDescription": "str",
+ "errorCode": "str",
+ "errorMessage": "str"
}
],
- "interval": "str", # Optional. The interval (window size)
- for which the metric data was returned in ISO 8601 duration format with a
- special case for 'FULL' value that returns single datapoint for entire
- time span requested ("" *Examples: PT15M, PT1H, P1D, FULL*"" ). This may
- be adjusted and different from what was originally requested if
- AutoAdjustTimegrain=true is specified.
- "namespace": "str", # Optional. The namespace of the metrics
- been queried.
- "resourceid": "str", # Optional. The resource that has been
- queried for metrics.
- "resourceregion": "str" # Optional. The region of the
- resource been queried for metrics.
+ "interval": "str",
+ "namespace": "str",
+ "resourceid": "str",
+ "resourceregion": "str"
}
]
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -682,8 +585,6 @@ def batch(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
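Two behavioral notes on the hunks above: the `error_map` annotation is loosened to a bare `MutableMapping`, and the `response.read()` preload before `map_error` is gone, presumably because newer azure-core no longer needs the body loaded eagerly to raise a useful error. The mapping logic itself is unchanged; a sketch that simulates the error path without a live pipeline, assuming `map_error` tolerates `response=None` (it only uses the response to construct the mapped exception):

from azure.core.exceptions import (
    ClientAuthenticationError,
    ResourceNotFoundError,
    map_error,
)

# Same shape as the generated code after this change: the annotation no
# longer spells out MutableMapping[int, Type[HttpResponseError]].
error_map = {
    401: ClientAuthenticationError,
    404: ResourceNotFoundError,
}

# Simulate a 404: map_error looks up the status code and raises the
# mapped azure-core exception type.
try:
    map_error(status_code=404, response=None, error_map=error_map)
except ResourceNotFoundError as exc:
    print(type(exc).__name__)  # ResourceNotFoundError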
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/__init__.py
index e1fc034d9277..64e613d8c69b 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/__init__.py
@@ -5,13 +5,19 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._operations import MetricDefinitionsOperations
-from ._operations import MetricsOperations
-from ._operations import MetricNamespacesOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._operations import MetricDefinitionsOperations # type: ignore
+from ._operations import MetricsOperations # type: ignore
+from ._operations import MetricNamespacesOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -19,5 +25,5 @@
"MetricsOperations",
"MetricNamespacesOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/_operations.py
index 3c194b87c8c5..b8973c7292a6 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/_operations.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
import datetime
import sys
-from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar, cast
+from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, cast
import urllib.parse
from azure.core.exceptions import (
@@ -30,7 +29,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -178,7 +177,6 @@ def __init__(self, *args, **kwargs):
@distributed_trace
def list(self, resource_uri: str, *, metricnamespace: Optional[str] = None, **kwargs: Any) -> Iterable[JSON]:
- # pylint: disable=line-too-long
"""Lists the metric definitions for the resource.
:param resource_uri: The identifier of the resource. Required.
@@ -195,48 +193,34 @@ def list(self, resource_uri: str, *, metricnamespace: Optional[str] = None, **kw
# response body for status code(s): 200
response == {
- "category": "str", # Optional. Custom category name for this metric.
+ "category": "str",
"dimensions": [
{
- "value": "str", # The invariant value. Required.
- "localizedValue": "str" # Optional. The display name.
+ "value": "str",
+ "localizedValue": "str"
}
],
- "displayDescription": "str", # Optional. Detailed description of this
- metric.
- "id": "str", # Optional. The resource identifier of the metric definition.
- "isDimensionRequired": bool, # Optional. Flag to indicate whether the
- dimension is required.
+ "displayDescription": "str",
+ "id": "str",
+ "isDimensionRequired": bool,
"metricAvailabilities": [
{
- "retention": "1 day, 0:00:00", # Optional. The retention
- period for the metric at the specified timegrain. Expressed as a
- duration 'PT1M', 'P1D', etc.
- "timeGrain": "1 day, 0:00:00" # Optional. The time grain
- specifies a supported aggregation interval for the metric. Expressed as a
- duration 'PT1M', 'P1D', etc.
+ "retention": "1 day, 0:00:00",
+ "timeGrain": "1 day, 0:00:00"
}
],
- "metricClass": "str", # Optional. The class of the metric. Known values are:
- "Availability", "Transactions", "Errors", "Latency", and "Saturation".
+ "metricClass": "str",
"name": {
- "value": "str", # The invariant value. Required.
- "localizedValue": "str" # Optional. The display name.
+ "value": "str",
+ "localizedValue": "str"
},
- "namespace": "str", # Optional. The namespace the metric belongs to.
- "primaryAggregationType": "str", # Optional. The primary aggregation type
- value defining how to use the values for display. Known values are: "None",
- "Average", "Count", "Minimum", "Maximum", and "Total".
- "resourceId": "str", # Optional. The resource identifier of the resource
- that emitted the metric.
+ "namespace": "str",
+ "primaryAggregationType": "str",
+ "resourceId": "str",
"supportedAggregationTypes": [
- "str" # Optional. The collection of what aggregation types are
- supported.
+ "str"
],
- "unit": "str" # Optional. The unit of the metric. Known values are: "Count",
- "Bytes", "Seconds", "CountPerSecond", "BytesPerSecond", "Percent",
- "MilliSeconds", "ByteSeconds", "Unspecified", "Cores", "MilliCores", "NanoCores",
- and "BitsPerSecond".
+ "unit": "str"
}
"""
_headers = kwargs.pop("headers", {}) or {}
@@ -244,7 +228,7 @@ def list(self, resource_uri: str, *, metricnamespace: Optional[str] = None, **kw
cls: ClsType[JSON] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -298,8 +282,6 @@ def get_next(next_link=None):
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
@@ -344,7 +326,6 @@ def list(
rollupby: Optional[str] = None,
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""**Lists the metric values for a resource**.
:param resource_uri: The identifier of the resource. Required.
@@ -373,13 +354,13 @@ def list(
:paramtype orderby: str
:keyword filter: The **$filter** is used to reduce the set of metric data
returned.:code:`<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`-
- Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\ **$filter=A eq ‘a1’ and
- B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\ :code:`<br>`- Invalid variant::code:`<br>`\ **$filter=A
- eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\ :code:`<br>`This is invalid because the
- logical or operator cannot separate two different metadata names.:code:`<br>`- Return all time
- series where A = a1, B = b1 and C = c1::code:`<br>`\ **$filter=A eq ‘a1’ and B eq ‘b1’ and C eq
- ‘c1’**\ :code:`<br>`- Return all time series where A = a1:code:`<br>`\ **$filter=A eq ‘a1’ and
- B eq ‘\ *’ and C eq ‘*\ ’**. Default value is None.
+ Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **$filter=A eq ‘a1’ and
+ B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\
+ **$filter=A eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid
+ because the logical or operator cannot separate two different metadata names.:code:`<br>`-
+ Return all time series where A = a1, B = b1 and C = c1::code:`<br>`\\ **$filter=A eq ‘a1’ and B
+ eq ‘b1’ and C eq ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>`\\
+ **$filter=A eq ‘a1’ and B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None.
:paramtype filter: str
:keyword result_type: Reduces the set of data collected. The syntax allowed depends on the
operation. See the operation's description for details. Known values are: "Data" and
@@ -411,81 +392,53 @@ def list(
# response body for status code(s): 200
response == {
- "timespan": "str", # The timespan for which the data was retrieved. Its
- value consists of two datetimes concatenated, separated by '/'. This may be
- adjusted in the future and returned back from what was originally requested.
- Required.
+ "timespan": "str",
"value": [
{
- "id": "str", # The metric Id. Required.
+ "id": "str",
"name": {
- "value": "str", # The invariant value. Required.
- "localizedValue": "str" # Optional. The display
- name.
+ "value": "str",
+ "localizedValue": "str"
},
"timeseries": [
{
"data": [
{
"timeStamp": "2020-02-20
- 00:00:00", # The timestamp for the metric value in ISO
- 8601 format. Required.
- "average": 0.0, # Optional.
- The average value in the time range.
- "count": 0.0, # Optional.
- The number of samples in the time range. Can be used to
- determine the number of values that contributed to the
- average value.
- "maximum": 0.0, # Optional.
- The greatest value in the time range.
- "minimum": 0.0, # Optional.
- The least value in the time range.
- "total": 0.0 # Optional. The
- sum of all of the values in the time range.
+ 00:00:00",
+ "average": 0.0,
+ "count": 0.0,
+ "maximum": 0.0,
+ "minimum": 0.0,
+ "total": 0.0
}
],
"metadatavalues": [
{
"name": {
- "value": "str", #
- The invariant value. Required.
+ "value": "str",
"localizedValue":
- "str" # Optional. The display name.
+ "str"
},
- "value": "str" # Optional.
- The value of the metadata.
+ "value": "str"
}
]
}
],
- "type": "str", # The resource type of the metric resource.
- Required.
- "unit": "str", # The unit of the metric. Required. Known
- values are: "Count", "Bytes", "Seconds", "CountPerSecond",
- "BytesPerSecond", "Percent", "MilliSeconds", "ByteSeconds",
- "Unspecified", "Cores", "MilliCores", "NanoCores", and "BitsPerSecond".
- "displayDescription": "str", # Optional. Detailed
- description of this metric.
- "errorCode": "str", # Optional. 'Success' or the error
- details on query failures for this metric.
- "errorMessage": "str" # Optional. Error message encountered
- querying this specific metric.
+ "type": "str",
+ "unit": "str",
+ "displayDescription": "str",
+ "errorCode": "str",
+ "errorMessage": "str"
}
],
- "cost": 0, # Optional. The integer value representing the relative cost of
- the query.
- "interval": "str", # Optional. The interval (window size) for which the
- metric data was returned in ISO 8601 duration format with a special case for
- 'FULL' value that returns single datapoint for entire time span requested (""
- *Examples: PT15M, PT1H, P1D, FULL*"" ). This may be adjusted and different from
- what was originally requested if AutoAdjustTimegrain=true is specified. This is
- not present if a metadata request was made.
- "namespace": "str", # Optional. The namespace of the metrics being queried.
- "resourceregion": "str" # Optional. The region of the resource being queried
- for metrics.
+ "cost": 0,
+ "interval": "str",
+ "namespace": "str",
+ "resourceregion": "str"
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -526,8 +479,6 @@ def list(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
@@ -577,14 +528,13 @@ def list(self, resource_uri: str, *, start_time: Optional[str] = None, **kwargs:
# response body for status code(s): 200
response == {
- "classification": "str", # Optional. Kind of namespace. Known values are:
- "Platform", "Custom", and "Qos".
- "id": "str", # Optional. The ID of the metric namespace.
- "name": "str", # Optional. The escaped name of the namespace.
+ "classification": "str",
+ "id": "str",
+ "name": "str",
"properties": {
- "metricNamespaceName": "str" # Optional. The metric namespace name.
+ "metricNamespaceName": "str"
},
- "type": "str" # Optional. The type of the namespace.
+ "type": "str"
}
"""
_headers = kwargs.pop("headers", {}) or {}
@@ -592,7 +542,7 @@ def list(self, resource_uri: str, *, start_time: Optional[str] = None, **kwargs:
cls: ClsType[JSON] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -646,8 +596,6 @@ def get_next(next_link=None):
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
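The metric-definition docstrings above express timeGrain and retention as parsed durations ("1 day, 0:00:00") and the interval examples as ISO-8601 strings (PT15M, PT1H, P1D). The generated `_serialization.py` parses the latter with isodate; a quick sketch of that conversion:

# Sketch: the PT1M / P1D interval strings in the docstrings above are
# ISO-8601 durations; isodate (used by the serialization layer's
# deserialize_duration) parses them into timedelta objects.
import isodate

for grain in ("PT1M", "PT15M", "PT1H", "P1D"):
    print(grain, "->", isodate.parse_duration(grain))
# PT1M -> 0:01:00
# PT15M -> 0:15:00
# PT1H -> 1:00:00
# P1D -> 1 day, 0:00:00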
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/__init__.py
index f20f4cbfa4f3..e4231c981f36 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/__init__.py
@@ -5,17 +5,23 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._operations import QueryOperations
-from ._operations import MetadataOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._operations import QueryOperations # type: ignore
+from ._operations import MetadataOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"QueryOperations",
"MetadataOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/_operations.py
index db1e79507d1a..875c13d68051 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/_operations.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -9,7 +9,7 @@
import datetime
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -29,7 +29,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -271,7 +271,6 @@ def __init__(self, *args, **kwargs):
def get(
self, workspace_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query.
Executes an Analytics query for data.
@@ -300,48 +299,39 @@ def get(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -371,8 +361,6 @@ def get(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
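An illustrative call against the `get` signature shown in this hunk. The `client` object and its `query` attribute are assumptions (client construction lives outside this file); the keyword-only `query`/`timespan` shape and the top-level `tables` key come from the code and docstring template above.

import datetime


def sample_get(client, workspace_id: str) -> None:
    # `client.query` is assumed to expose the QueryOperations group above.
    result = client.query.get(
        workspace_id,
        query="AppRequests | take 5",
        timespan=datetime.timedelta(hours=1),  # optional query window
    )
    for table in result.get("tables", []):
        print(table["name"], len(table["rows"]))
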
@@ -396,7 +384,6 @@ def execute(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query.
Executes an Analytics query for data. `Here
@@ -425,13 +412,10 @@ def execute(
# JSON input template you can fill out and use as your body input.
body = {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The timespan over which to query
- data. This is an ISO8601 time period value. This timespan is applied in addition
- to any that are specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to query in addition to the
- primary workspace.
+ "str"
]
}
@@ -441,45 +425,36 @@ def execute(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
@@ -493,7 +468,6 @@ def execute(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query.
Executes an Analytics query for data. `Here
@@ -526,45 +500,36 @@ def execute(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
@@ -572,7 +537,6 @@ def execute(
def execute(
self, workspace_id: str, body: Union[JSON, IO[bytes]], *, prefer: Optional[str] = None, **kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query.
Executes an Analytics query for data. `Here
@@ -598,13 +562,10 @@ def execute(
# JSON input template you can fill out and use as your body input.
body = {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The timespan over which to query
- data. This is an ISO8601 time period value. This timespan is applied in addition
- to any that are specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to query in addition to the
- primary workspace.
+ "str"
]
}
@@ -614,48 +575,39 @@ def execute(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -696,8 +648,6 @@ def execute(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
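A sketch of `execute` using the JSON body template from the docstring above. `client` is assumed as before, and the `prefer` value is the commonly used statistics hint for this service rather than anything this hunk defines.

def sample_execute(client, workspace_id: str) -> None:
    body = {
        "query": "AppRequests | summarize count() by bin(TimeGenerated, 1h)",
        "timespan": "PT24H",  # ISO8601 period, per the template above
        "workspaces": [],     # extra workspaces to fan the query out to
    }
    result = client.query.execute(workspace_id, body, prefer="include-statistics=true")
    print(result.get("statistics"))
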
@@ -715,7 +665,6 @@ def execute(
def resource_get(
self, resource_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource URI.
Executes an Analytics query for data in the context of a resource. `Here
@@ -745,48 +694,39 @@ def resource_get(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -816,8 +756,6 @@ def resource_get(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
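`resource_get` takes an ARM resource ID instead of a workspace GUID. An illustrative call follows; the resource ID and query are placeholders.

def sample_resource_get(client) -> None:
    resource_id = (
        "/subscriptions/<sub>/resourceGroups/<rg>"
        "/providers/Microsoft.Storage/storageAccounts/<account>"
    )
    result = client.query.resource_get(resource_id, query="StorageBlobLogs | take 5")
    for table in result.get("tables", []):
        print(table["name"])
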
@@ -841,7 +779,6 @@ def resource_execute(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource ID.
Executes an Analytics query for data in the context of a resource. `Here
@@ -869,13 +806,10 @@ def resource_execute(
# JSON input template you can fill out and use as your body input.
body = {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The timespan over which to query
- data. This is an ISO8601 time period value. This timespan is applied in addition
- to any that are specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to query in addition to the
- primary workspace.
+ "str"
]
}
@@ -885,45 +819,36 @@ def resource_execute(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
@@ -937,7 +862,6 @@ def resource_execute(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource ID.
Executes an Analytics query for data in the context of a resource. `Here
@@ -969,45 +893,36 @@ def resource_execute(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
@@ -1015,7 +930,6 @@ def resource_execute(
def resource_execute(
self, resource_id: str, body: Union[JSON, IO[bytes]], *, prefer: Optional[str] = None, **kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource ID.
Executes an Analytics query for data in the context of a resource. `Here
@@ -1040,13 +954,10 @@ def resource_execute(
# JSON input template you can fill out and use as your body input.
body = {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The timespan over which to query
- data. This is an ISO8601 time period value. This timespan is applied in addition
- to any that are specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to query in addition to the
- primary workspace.
+ "str"
]
}
@@ -1056,48 +967,39 @@ def resource_execute(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1138,8 +1040,6 @@ def resource_execute(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
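Per its overloads, `resource_execute` accepts either a JSON mapping or a bytes stream. A sketch of the stream form under those assumptions (the in-memory payload stands in for a real file):

import io
import json


def sample_resource_execute(client, resource_id: str) -> None:
    payload = io.BytesIO(json.dumps({"query": "AzureMetrics | take 5"}).encode("utf-8"))
    # content_type defaults to application/json per the overloads above
    result = client.query.resource_execute(resource_id, payload)
    print(len(result.get("tables", [])))
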
@@ -1155,7 +1055,6 @@ def resource_execute(
@overload
def batch(self, body: JSON, *, content_type: str = "application/json", **kwargs: Any) -> JSON:
- # pylint: disable=line-too-long
"""Execute a batch of Analytics queries.
Executes a batch of Analytics queries for data. `Here
@@ -1179,30 +1078,19 @@ def batch(self, body: JSON, *, content_type: str = "application/json", **kwargs:
"requests": [
{
"body": {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The
- timespan over which to query data. This is an ISO8601 time period
- value. This timespan is applied in addition to any that are
- specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to
- query in addition to the primary workspace.
+ "str"
]
},
- "id": "str", # Unique ID corresponding to each request in
- the batch. Required.
- "workspace": "str", # Primary Workspace ID of the query.
- This is the Workspace ID from the Properties blade in the Azure portal.
- Required.
+ "id": "str",
+ "workspace": "str",
"headers": {
- "str": "str" # Optional. Headers of the request. Can
- use prefer header to set server timeout and to query statistics and
- visualization information.
+ "str": "str"
},
- "method": "POST", # Optional. Default value is "POST". The
- method of a single request in a batch, defaults to POST.
- "path": "/query" # Optional. Default value is "/query". The
- query path of a single request in a batch, defaults to /query.
+ "method": "POST",
+ "path": "/query"
}
]
}
@@ -1213,66 +1101,45 @@ def batch(self, body: JSON, *, content_type: str = "application/json", **kwargs:
{
"body": {
"error": {
- "code": "str", # A machine readable error
- code. Required.
- "message": "str", # A human readable error
- message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's
- code. Required.
- "message": "str", # A human
- readable error message. Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional.
- Indicates resources which were responsible for the
- error.
+ "str"
],
- "target": "str", # Optional.
- Indicates which property in the request is responsible
- for the error.
- "value": "str" # Optional.
- Indicates which value in 'target' is responsible for the
- error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON
- format.
- "statistics": {}, # Optional. Statistics represented
- in JSON format.
+ "render": {},
+ "statistics": {},
"tables": [
{
"columns": [
{
- "name": "str", # The
- name of this column. Required.
- "type": "str" # The
- data type of this column. Required. Known values are:
- "bool", "datetime", "dynamic", "int", "long", "real",
- "string", "guid", "decimal", and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the
- table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting
- rows from this query. Required.
+ {}
]
]
}
]
},
"headers": {
- "str": "str" # Optional. Dictionary of
- :code:``.
+ "str": "str"
},
- "id": "str", # Optional. An array of responses corresponding
- to each individual request in a batch.
- "status": 0 # Optional. An array of responses corresponding
- to each individual request in a batch.
+ "id": "str",
+ "status": 0
}
]
}
@@ -1280,7 +1147,6 @@ def batch(self, body: JSON, *, content_type: str = "application/json", **kwargs:
@overload
def batch(self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any) -> JSON:
- # pylint: disable=line-too-long
"""Execute a batch of Analytics queries.
Executes a batch of Analytics queries for data. `Here
@@ -1305,66 +1171,45 @@ def batch(self, body: IO[bytes], *, content_type: str = "application/json", **kw
{
"body": {
"error": {
- "code": "str", # A machine readable error
- code. Required.
- "message": "str", # A human readable error
- message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's
- code. Required.
- "message": "str", # A human
- readable error message. Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional.
- Indicates resources which were responsible for the
- error.
+ "str"
],
- "target": "str", # Optional.
- Indicates which property in the request is responsible
- for the error.
- "value": "str" # Optional.
- Indicates which value in 'target' is responsible for the
- error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON
- format.
- "statistics": {}, # Optional. Statistics represented
- in JSON format.
+ "render": {},
+ "statistics": {},
"tables": [
{
"columns": [
{
- "name": "str", # The
- name of this column. Required.
- "type": "str" # The
- data type of this column. Required. Known values are:
- "bool", "datetime", "dynamic", "int", "long", "real",
- "string", "guid", "decimal", and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the
- table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting
- rows from this query. Required.
+ {}
]
]
}
]
},
"headers": {
- "str": "str" # Optional. Dictionary of
- :code:``.
+ "str": "str"
},
- "id": "str", # Optional. An array of responses corresponding
- to each individual request in a batch.
- "status": 0 # Optional. An array of responses corresponding
- to each individual request in a batch.
+ "id": "str",
+ "status": 0
}
]
}
@@ -1372,7 +1217,6 @@ def batch(self, body: IO[bytes], *, content_type: str = "application/json", **kw
@distributed_trace
def batch(self, body: Union[JSON, IO[bytes]], **kwargs: Any) -> JSON:
- # pylint: disable=line-too-long
"""Execute a batch of Analytics queries.
Executes a batch of Analytics queries for data. `Here
@@ -1393,30 +1237,19 @@ def batch(self, body: Union[JSON, IO[bytes]], **kwargs: Any) -> JSON:
"requests": [
{
"body": {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The
- timespan over which to query data. This is an ISO8601 time period
- value. This timespan is applied in addition to any that are
- specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to
- query in addition to the primary workspace.
+ "str"
]
},
- "id": "str", # Unique ID corresponding to each request in
- the batch. Required.
- "workspace": "str", # Primary Workspace ID of the query.
- This is the Workspace ID from the Properties blade in the Azure portal.
- Required.
+ "id": "str",
+ "workspace": "str",
"headers": {
- "str": "str" # Optional. Headers of the request. Can
- use prefer header to set server timeout and to query statistics and
- visualization information.
+ "str": "str"
},
- "method": "POST", # Optional. Default value is "POST". The
- method of a single request in a batch, defaults to POST.
- "path": "/query" # Optional. Default value is "/query". The
- query path of a single request in a batch, defaults to /query.
+ "method": "POST",
+ "path": "/query"
}
]
}
@@ -1427,71 +1260,50 @@ def batch(self, body: Union[JSON, IO[bytes]], **kwargs: Any) -> JSON:
{
"body": {
"error": {
- "code": "str", # A machine readable error
- code. Required.
- "message": "str", # A human readable error
- message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's
- code. Required.
- "message": "str", # A human
- readable error message. Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional.
- Indicates resources which were responsible for the
- error.
+ "str"
],
- "target": "str", # Optional.
- Indicates which property in the request is responsible
- for the error.
- "value": "str" # Optional.
- Indicates which value in 'target' is responsible for the
- error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON
- format.
- "statistics": {}, # Optional. Statistics represented
- in JSON format.
+ "render": {},
+ "statistics": {},
"tables": [
{
"columns": [
{
- "name": "str", # The
- name of this column. Required.
- "type": "str" # The
- data type of this column. Required. Known values are:
- "bool", "datetime", "dynamic", "int", "long", "real",
- "string", "guid", "decimal", and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the
- table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting
- rows from this query. Required.
+ {}
]
]
}
]
},
"headers": {
- "str": "str" # Optional. Dictionary of
- :code:``.
+ "str": "str"
},
- "id": "str", # Optional. An array of responses corresponding
- to each individual request in a batch.
- "status": 0 # Optional. An array of responses corresponding
- to each individual request in a batch.
+ "id": "str",
+ "status": 0
}
]
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1530,8 +1342,6 @@ def batch(self, body: Union[JSON, IO[bytes]], **kwargs: Any) -> JSON:
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
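A sketch of a `batch` payload following the request template in the docstring above. IDs and the workspace value are placeholders, and the top-level `responses` key on the reply is my assumption; the hunk only shows the per-item shape (`body`, `headers`, `id`, `status`).

def sample_batch(client) -> None:
    body = {
        "requests": [
            {
                "id": "1",                      # unique within the batch
                "workspace": "<workspace-id>",  # primary workspace GUID
                "body": {"query": "AppRequests | count"},
                "method": "POST",               # default per the template
                "path": "/query",               # default per the template
            }
        ]
    }
    reply = client.query.batch(body)
    for item in reply.get("responses", []):     # top-level key assumed
        print(item.get("id"), item.get("status"))
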
@@ -1549,7 +1359,6 @@ def batch(self, body: Union[JSON, IO[bytes]], **kwargs: Any) -> JSON:
def resource_get_xms(
self, resource_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource URI.
Executes an Analytics query for data in the context of a resource. `Here
@@ -1579,48 +1388,39 @@ def resource_get_xms(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1650,8 +1450,6 @@ def resource_get_xms(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
@@ -1675,7 +1473,6 @@ def resource_execute_xms(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource ID.
Executes an Analytics query for data in the context of a resource. `Here
@@ -1703,13 +1500,10 @@ def resource_execute_xms(
# JSON input template you can fill out and use as your body input.
body = {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The timespan over which to query
- data. This is an ISO8601 time period value. This timespan is applied in addition
- to any that are specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to query in addition to the
- primary workspace.
+ "str"
]
}
@@ -1719,45 +1513,36 @@ def resource_execute_xms(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
@@ -1771,7 +1556,6 @@ def resource_execute_xms(
content_type: str = "application/json",
**kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource ID.
Executes an Analytics query for data in the context of a resource. `Here
@@ -1803,45 +1587,36 @@ def resource_execute_xms(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
@@ -1849,7 +1624,6 @@ def resource_execute_xms(
def resource_execute_xms(
self, resource_id: str, body: Union[JSON, IO[bytes]], *, prefer: Optional[str] = None, **kwargs: Any
) -> JSON:
- # pylint: disable=line-too-long
"""Execute an Analytics query using resource ID.
Executes an Analytics query for data in the context of a resource. `Here
@@ -1874,13 +1648,10 @@ def resource_execute_xms(
# JSON input template you can fill out and use as your body input.
body = {
- "query": "str", # The query to execute. Required.
- "timespan": "str", # Optional. Optional. The timespan over which to query
- data. This is an ISO8601 time period value. This timespan is applied in addition
- to any that are specified in the query expression.
+ "query": "str",
+ "timespan": "str",
"workspaces": [
- "str" # Optional. A list of workspaces to query in addition to the
- primary workspace.
+ "str"
]
}
@@ -1890,48 +1661,39 @@ def resource_execute_xms(
{
"columns": [
{
- "name": "str", # The name of this column.
- Required.
- "type": "str" # The data type of this
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
+ "name": "str",
+ "type": "str"
}
],
- "name": "str", # The name of the table. Required.
+ "name": "str",
"rows": [
[
- {} # The resulting rows from this query.
- Required.
+ {}
]
]
}
],
"error": {
- "code": "str", # A machine readable error code. Required.
- "message": "str", # A human readable error message. Required.
+ "code": "str",
+ "message": "str",
"details": [
{
- "code": "str", # The error's code. Required.
- "message": "str", # A human readable error message.
- Required.
+ "code": "str",
+ "message": "str",
"resources": [
- "str" # Optional. Indicates resources which
- were responsible for the error.
+ "str"
],
- "target": "str", # Optional. Indicates which
- property in the request is responsible for the error.
- "value": "str" # Optional. Indicates which value in
- 'target' is responsible for the error.
+ "target": "str",
+ "value": "str"
}
],
"innererror": ...
},
- "render": {}, # Optional. Visualization data in JSON format.
- "statistics": {} # Optional. Statistics represented in JSON format.
+ "render": {},
+ "statistics": {}
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1972,8 +1734,6 @@ def resource_execute_xms(
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
@@ -2007,7 +1767,6 @@ def __init__(self, *args, **kwargs):
@distributed_trace
def get(self, workspace_id: str, **kwargs: Any) -> JSON:
- # pylint: disable=line-too-long
"""Gets metadata information.
Retrieve the metadata information for the workspace, including its schema, functions, workspace
@@ -2027,122 +1786,93 @@ def get(self, workspace_id: str, **kwargs: Any) -> JSON:
response == {
"applications": [
{
- "id": "str", # The ID of the Application Insights app.
- Required.
- "name": "str", # The name of the Application Insights app.
- Required.
- "region": "str", # The Azure region of the Application
- Insights app. Required.
- "resourceId": "str", # The ARM resource ID of the
- Application Insights app. Required.
+ "id": "str",
+ "name": "str",
+ "region": "str",
+ "resourceId": "str",
"related": {
"functions": [
- "str" # Optional. The related functions for
- the Application Insights app.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- Application Insights app.
+ "str"
]
}
}
],
"categories": [
{
- "displayName": "str", # The display name of the category.
- Required.
- "id": "str", # The ID of the category. Required.
- "description": "str", # Optional. The description of the
- category.
+ "displayName": "str",
+ "id": "str",
+ "description": "str",
"related": {
"functions": [
- "str" # Optional. The functions related to
- the category.
+ "str"
],
"queries": [
- "str" # Optional. The saved queries related
- to the category.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The resource types related
- to the category.
+ "str"
],
"solutions": [
- "str" # Optional. The Log Analytics
- solutions related to the category.
+ "str"
],
"tables": [
- "str" # Optional. The tables related to the
- category.
+ "str"
]
}
}
],
"functions": [
{
- "body": "str", # The KQL body of the function. Required.
- "id": "str", # The ID of the function. Required.
- "name": "str", # The name of the function, to be used in
- queries. Required.
- "description": "str", # Optional. The description of the
- function.
- "displayName": "str", # Optional. The display name of the
- function.
- "parameters": "str", # Optional. The parameters/arguments of
- the function, if any.
- "properties": {}, # Optional. The properties of the
- function.
+ "body": "str",
+ "id": "str",
+ "name": "str",
+ "description": "str",
+ "displayName": "str",
+ "parameters": "str",
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The related categories for
- the function.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the function.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the function.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- function.
+ "str"
],
"workspaces": [
- "str" # Optional. The related workspaces for
- the function.
+ "str"
]
},
- "tags": {} # Optional. The tags associated with the
- function.
+ "tags": {}
}
],
"permissions": [
{
"workspaces": [
{
- "resourceId": "str", # The resource ID on
- the permission indication. Required.
+ "resourceId": "str",
"denyTables": [
- "str" # Optional. The list of tables
- that were denied access for the resource ID.
+ "str"
]
}
],
"applications": [
{
- "resourceId": "str" # The resource ID on the
- permission indication. Required.
+ "resourceId": "str"
}
],
"resources": [
{
- "resourceId": "str", # The resource ID on
- the permission indication. Required.
+ "resourceId": "str",
"denyTables": [
- "str" # Optional. The list of tables
- that were denied access for the resource ID.
+ "str"
]
}
]
@@ -2150,221 +1880,164 @@ def get(self, workspace_id: str, **kwargs: Any) -> JSON:
],
"queries": [
{
- "body": "str", # The KQL body of the query. Required.
- "id": "str", # The ID of the query. Required.
- "description": "str", # Optional. The description of the
- query.
- "displayName": "str", # Optional. The display name of the
- query.
+ "body": "str",
+ "id": "str",
+ "description": "str",
+ "displayName": "str",
"labels": [
- "str" # Optional. The user defined labels associated
- with the query.
+ "str"
],
- "properties": {}, # Optional. The properties of the query.
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The related categories for
- the query.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the query.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the query.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- query.
+ "str"
]
},
- "tags": {} # Optional. The tags associated with the query.
+ "tags": {}
}
],
"resourceTypes": [
{
- "id": "str", # The ID of the resource-type. Required.
- "type": "str", # The type of the resource-type. Required.
- "description": "str", # Optional. The description of the
- resource-type.
- "displayName": "str", # Optional. The display name of the
- resource-type.
+ "id": "str",
+ "type": "str",
+ "description": "str",
+ "displayName": "str",
"labels": [
- "str" # Optional. The user-defined labels of the
- resource-type.
+ "str"
],
- "properties": {}, # Optional. The properties of the
- resource-type.
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The categories related to
- the resource-type.
+ "str"
],
"functions": [
- "str" # Optional. The functions related to
- the resource-type.
+ "str"
],
"queries": [
- "str" # Optional. The queries related to the
- resource-type.
+ "str"
],
"resources": [
- "str" # Optional. The Azure resources
- related to the resource-type.
+ "str"
],
"tables": [
- "str" # Optional. The tables related to the
- resource-type.
+ "str"
],
"workspaces": [
- "str" # Optional. The Log Analytics
- workspaces related to the resource-type.
+ "str"
]
},
- "tags": {} # Optional. The tags associated with the
- resource-type.
+ "tags": {}
}
],
"resources": [
- {} # Optional. The list of Azure resources that were referenced in
- the metadata request.
+ {}
],
"solutions": [
{
- "id": "str", # The ID of the Log Analytics solution.
- Required.
- "name": "str", # The name of the Log Analytics solution.
- Required.
+ "id": "str",
+ "name": "str",
"related": {
"tables": [
- "str" # The tables related to the Log
- Analytics solution. Required.
+ "str"
],
"categories": [
- "str" # Optional. The categories related to
- the Log Analytics solution.
+ "str"
],
"functions": [
- "str" # Optional. The functions related to
- the Log Analytics solution.
+ "str"
],
"queries": [
- "str" # Optional. The saved queries related
- to the Log Analytics solution.
+ "str"
],
"workspaces": [
- "str" # Optional. The Workspaces referenced
- in the metadata request that are related to the Log Analytics
- solution.
+ "str"
]
},
- "description": "str", # Optional. The description of the Log
- Analytics solution.
- "displayName": "str", # Optional. The display name of the
- Log Analytics solution.
- "properties": {}, # Optional. The properties of the Log
- Analytics solution.
- "tags": {} # Optional. The tags that are associated with the
- Log Analytics solution.
+ "description": "str",
+ "displayName": "str",
+ "properties": {},
+ "tags": {}
}
],
"tables": [
{
- "id": "str", # The ID of the table. Required.
- "name": "str", # The name of the table. Required.
+ "id": "str",
+ "name": "str",
"columns": [
{
- "name": "str", # The name of the column.
- Required.
- "type": "str", # The data type of the
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
- "description": "str", # Optional. The
- description of the column.
- "isPreferredFacet": bool, # Optional. A flag
- indicating this column is a preferred facet.
- "source": {} # Optional. an indication of
- the source of the column, used only when multiple workspaces have
- conflicting definition for the column.
+ "name": "str",
+ "type": "str",
+ "description": "str",
+ "isPreferredFacet": bool,
+ "source": {}
}
],
- "description": "str", # Optional. The description of the
- table.
+ "description": "str",
"labels": [
- "str" # Optional. The user defined labels of the
- table.
+ "str"
],
- "properties": {}, # Optional. The properties of the table.
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The related categories for
- the table.
+ "str"
],
"functions": [
- "str" # Optional. The related functions for
- the table.
+ "str"
],
"queries": [
- "str" # Optional. The related saved queries
- for the table.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the table.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the table.
+ "str"
],
"workspaces": [
- "str" # Optional. The related Log Analytics
- workspaces for the table.
+ "str"
]
},
- "tags": {}, # Optional. The tags associated with the table.
- "timespanColumn": "str" # Optional. The column associated
- with the timespan query parameter for the table.
+ "tags": {},
+ "timespanColumn": "str"
}
],
"workspaces": [
{
- "id": "str", # The ID of the Log Analytics workspace.
- Required.
- "name": "str", # The name of the Log Analytics workspace.
- Required.
- "region": "str", # The Azure region of the Log Analytics
- workspace. Required.
- "resourceId": "str", # The ARM resource ID of the Log
- Analytics workspace. Required.
+ "id": "str",
+ "name": "str",
+ "region": "str",
+ "resourceId": "str",
"related": {
"functions": [
- "str" # Optional. The related functions for
- the Log Analytics workspace.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the Log Analytics workspace.
+ "str"
],
"resources": [
- "str" # Optional. The related Azure
- resources for the Log Analytics workspace.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the Log Analytics workspace.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- Log Analytics workspace.
+ "str"
]
}
}
]
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2392,8 +2065,6 @@ def get(self, workspace_id: str, **kwargs: Any) -> JSON:
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
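An illustrative call for the metadata `get` shown above. `client.metadata` is assumed to be the MetadataOperations group exported earlier in this diff; the key names come from the response template.

def sample_metadata_get(client, workspace_id: str) -> None:
    meta = client.metadata.get(workspace_id)
    # Top-level keys per the template: applications, categories, functions,
    # permissions, queries, resourceTypes, resources, solutions, tables, workspaces.
    print(sorted(meta))
    for table in meta.get("tables", []):
        print(table["id"], table["name"])
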
@@ -2409,7 +2080,6 @@ def get(self, workspace_id: str, **kwargs: Any) -> JSON:
@distributed_trace
def post(self, workspace_id: str, **kwargs: Any) -> JSON:
- # pylint: disable=line-too-long
"""Gets metadata information.
Retrieve the metadata information for the workspace, including its schema, functions, workspace
@@ -2429,122 +2099,93 @@ def post(self, workspace_id: str, **kwargs: Any) -> JSON:
response == {
"applications": [
{
- "id": "str", # The ID of the Application Insights app.
- Required.
- "name": "str", # The name of the Application Insights app.
- Required.
- "region": "str", # The Azure region of the Application
- Insights app. Required.
- "resourceId": "str", # The ARM resource ID of the
- Application Insights app. Required.
+ "id": "str",
+ "name": "str",
+ "region": "str",
+ "resourceId": "str",
"related": {
"functions": [
- "str" # Optional. The related functions for
- the Application Insights app.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- Application Insights app.
+ "str"
]
}
}
],
"categories": [
{
- "displayName": "str", # The display name of the category.
- Required.
- "id": "str", # The ID of the category. Required.
- "description": "str", # Optional. The description of the
- category.
+ "displayName": "str",
+ "id": "str",
+ "description": "str",
"related": {
"functions": [
- "str" # Optional. The functions related to
- the category.
+ "str"
],
"queries": [
- "str" # Optional. The saved queries related
- to the category.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The resource types related
- to the category.
+ "str"
],
"solutions": [
- "str" # Optional. The Log Analytics
- solutions related to the category.
+ "str"
],
"tables": [
- "str" # Optional. The tables related to the
- category.
+ "str"
]
}
}
],
"functions": [
{
- "body": "str", # The KQL body of the function. Required.
- "id": "str", # The ID of the function. Required.
- "name": "str", # The name of the function, to be used in
- queries. Required.
- "description": "str", # Optional. The description of the
- function.
- "displayName": "str", # Optional. The display name of the
- function.
- "parameters": "str", # Optional. The parameters/arguments of
- the function, if any.
- "properties": {}, # Optional. The properties of the
- function.
+ "body": "str",
+ "id": "str",
+ "name": "str",
+ "description": "str",
+ "displayName": "str",
+ "parameters": "str",
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The related categories for
- the function.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the function.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the function.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- function.
+ "str"
],
"workspaces": [
- "str" # Optional. The related workspaces for
- the function.
+ "str"
]
},
- "tags": {} # Optional. The tags associated with the
- function.
+ "tags": {}
}
],
"permissions": [
{
"workspaces": [
{
- "resourceId": "str", # The resource ID on
- the permission indication. Required.
+ "resourceId": "str",
"denyTables": [
- "str" # Optional. The list of tables
- that were denied access for the resource ID.
+ "str"
]
}
],
"applications": [
{
- "resourceId": "str" # The resource ID on the
- permission indication. Required.
+ "resourceId": "str"
}
],
"resources": [
{
- "resourceId": "str", # The resource ID on
- the permission indication. Required.
+ "resourceId": "str",
"denyTables": [
- "str" # Optional. The list of tables
- that were denied access for the resource ID.
+ "str"
]
}
]
@@ -2552,221 +2193,164 @@ def post(self, workspace_id: str, **kwargs: Any) -> JSON:
],
"queries": [
{
- "body": "str", # The KQL body of the query. Required.
- "id": "str", # The ID of the query. Required.
- "description": "str", # Optional. The description of the
- query.
- "displayName": "str", # Optional. The display name of the
- query.
+ "body": "str",
+ "id": "str",
+ "description": "str",
+ "displayName": "str",
"labels": [
- "str" # Optional. The user defined labels associated
- with the query.
+ "str"
],
- "properties": {}, # Optional. The properties of the query.
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The related categories for
- the query.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the query.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the query.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- query.
+ "str"
]
},
- "tags": {} # Optional. The tags associated with the query.
+ "tags": {}
}
],
"resourceTypes": [
{
- "id": "str", # The ID of the resource-type. Required.
- "type": "str", # The type of the resource-type. Required.
- "description": "str", # Optional. The description of the
- resource-type.
- "displayName": "str", # Optional. The display name of the
- resource-type.
+ "id": "str",
+ "type": "str",
+ "description": "str",
+ "displayName": "str",
"labels": [
- "str" # Optional. The user-defined labels of the
- resource-type.
+ "str"
],
- "properties": {}, # Optional. The properties of the
- resource-type.
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The categories related to
- the resource-type.
+ "str"
],
"functions": [
- "str" # Optional. The functions related to
- the resource-type.
+ "str"
],
"queries": [
- "str" # Optional. The queries related to the
- resource-type.
+ "str"
],
"resources": [
- "str" # Optional. The Azure resources
- related to the resource-type.
+ "str"
],
"tables": [
- "str" # Optional. The tables related to the
- resource-type.
+ "str"
],
"workspaces": [
- "str" # Optional. The Log Analytics
- workspaces related to the resource-type.
+ "str"
]
},
- "tags": {} # Optional. The tags associated with the
- resource-type.
+ "tags": {}
}
],
"resources": [
- {} # Optional. The list of Azure resources that were referenced in
- the metadata request.
+ {}
],
"solutions": [
{
- "id": "str", # The ID of the Log Analytics solution.
- Required.
- "name": "str", # The name of the Log Analytics solution.
- Required.
+ "id": "str",
+ "name": "str",
"related": {
"tables": [
- "str" # The tables related to the Log
- Analytics solution. Required.
+ "str"
],
"categories": [
- "str" # Optional. The categories related to
- the Log Analytics solution.
+ "str"
],
"functions": [
- "str" # Optional. The functions related to
- the Log Analytics solution.
+ "str"
],
"queries": [
- "str" # Optional. The saved queries related
- to the Log Analytics solution.
+ "str"
],
"workspaces": [
- "str" # Optional. The Workspaces referenced
- in the metadata request that are related to the Log Analytics
- solution.
+ "str"
]
},
- "description": "str", # Optional. The description of the Log
- Analytics solution.
- "displayName": "str", # Optional. The display name of the
- Log Analytics solution.
- "properties": {}, # Optional. The properties of the Log
- Analytics solution.
- "tags": {} # Optional. The tags that are associated with the
- Log Analytics solution.
+ "description": "str",
+ "displayName": "str",
+ "properties": {},
+ "tags": {}
}
],
"tables": [
{
- "id": "str", # The ID of the table. Required.
- "name": "str", # The name of the table. Required.
+ "id": "str",
+ "name": "str",
"columns": [
{
- "name": "str", # The name of the column.
- Required.
- "type": "str", # The data type of the
- column. Required. Known values are: "bool", "datetime",
- "dynamic", "int", "long", "real", "string", "guid", "decimal",
- and "timespan".
- "description": "str", # Optional. The
- description of the column.
- "isPreferredFacet": bool, # Optional. A flag
- indicating this column is a preferred facet.
- "source": {} # Optional. an indication of
- the source of the column, used only when multiple workspaces have
- conflicting definition for the column.
+ "name": "str",
+ "type": "str",
+ "description": "str",
+ "isPreferredFacet": bool,
+ "source": {}
}
],
- "description": "str", # Optional. The description of the
- table.
+ "description": "str",
"labels": [
- "str" # Optional. The user defined labels of the
- table.
+ "str"
],
- "properties": {}, # Optional. The properties of the table.
+ "properties": {},
"related": {
"categories": [
- "str" # Optional. The related categories for
- the table.
+ "str"
],
"functions": [
- "str" # Optional. The related functions for
- the table.
+ "str"
],
"queries": [
- "str" # Optional. The related saved queries
- for the table.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the table.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the table.
+ "str"
],
"workspaces": [
- "str" # Optional. The related Log Analytics
- workspaces for the table.
+ "str"
]
},
- "tags": {}, # Optional. The tags associated with the table.
- "timespanColumn": "str" # Optional. The column associated
- with the timespan query parameter for the table.
+ "tags": {},
+ "timespanColumn": "str"
}
],
"workspaces": [
{
- "id": "str", # The ID of the Log Analytics workspace.
- Required.
- "name": "str", # The name of the Log Analytics workspace.
- Required.
- "region": "str", # The Azure region of the Log Analytics
- workspace. Required.
- "resourceId": "str", # The ARM resource ID of the Log
- Analytics workspace. Required.
+ "id": "str",
+ "name": "str",
+ "region": "str",
+ "resourceId": "str",
"related": {
"functions": [
- "str" # Optional. The related functions for
- the Log Analytics workspace.
+ "str"
],
"resourceTypes": [
- "str" # Optional. The related resource types
- for the Log Analytics workspace.
+ "str"
],
"resources": [
- "str" # Optional. The related Azure
- resources for the Log Analytics workspace.
+ "str"
],
"solutions": [
- "str" # Optional. The related Log Analytics
- solutions for the Log Analytics workspace.
+ "str"
],
"tables": [
- "str" # Optional. The related tables for the
- Log Analytics workspace.
+ "str"
]
}
}
]
}
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2794,8 +2378,6 @@ def post(self, workspace_id: str, **kwargs: Any) -> JSON:
response = pipeline_response.http_response
if response.status_code not in [200]:
- if _stream:
- response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
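The `post` variant returns the same metadata document as `get`; both verbs are exposed by the service, and this diff does not say when to prefer one over the other. A minimal sketch under the same `client` assumption:

def sample_metadata_post(client, workspace_id: str) -> None:
    meta = client.metadata.post(workspace_id)
    assert isinstance(meta, dict)  # JSON = MutableMapping[str, Any], per this module
    print(len(meta.get("tables", [])))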