- Moves the project to be pyproject.toml based
a/sdk/monitor/azure-monitor-query/apiview-properties.json +++ b/sdk/monitor/azure-monitor-query/apiview-properties.json @@ -1,6 +1,4 @@ { "CrossLanguagePackageId": "MonitorQueryLogs", - "CrossLanguageDefinitionId": { - "azure.monitor.query.models._ColumnType": "MonitorQueryLogs.ColumnDataType" - } + "CrossLanguageDefinitionId": {} } \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_client.py index 660e4bc4067c..cb6663709768 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_client.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_client.py @@ -15,7 +15,7 @@ from azure.core.rest import HttpRequest, HttpResponse from ._configuration import MonitorQueryLogsClientConfiguration -from ._operations._operations import _MonitorQueryLogsClientOperationsMixin +from ._operations import _MonitorQueryLogsClientOperationsMixin from ._utils.serialization import Deserializer, Serializer if TYPE_CHECKING: @@ -30,8 +30,8 @@ class MonitorQueryLogsClient(_MonitorQueryLogsClientOperationsMixin): :keyword endpoint: The Log Analytics service endpoint. Default value is "https://api.loganalytics.io". :paramtype endpoint: str - :keyword api_version: The service API version. Known values are "v1" and None. Default value is - "v1". Note that overriding this default value may result in unsupported behavior. + :keyword api_version: The service API version. Known values are "v1". Default value is "v1". + Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str or ~azure.monitor.query.models.Versions """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_configuration.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_configuration.py index 6ee9ddf5b776..4cba4d134ec6 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_configuration.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_configuration.py @@ -27,8 +27,8 @@ class MonitorQueryLogsClientConfiguration: # pylint: disable=too-many-instance- :param endpoint: The Log Analytics service endpoint. Default value is "https://api.loganalytics.io". :type endpoint: str - :keyword api_version: The service API version. Known values are "v1" and None. Default value is - "v1". Note that overriding this default value may result in unsupported behavior. + :keyword api_version: The service API version. Known values are "v1". Default value is "v1". + Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str or ~azure.monitor.query.models.Versions """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/__init__.py index 933fcd7d1b55..9e1931184851 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/__init__.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/__init__.py @@ -12,6 +12,7 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import +from ._operations import _MonitorQueryLogsClientOperationsMixin # type: ignore # pylint: disable=unused-import from ._patch import __all__ as _patch_all from ._patch import * diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/_operations.py index c585b83bf549..9e79ef9b9fbc 100644 --- 
a/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/_operations.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/_operations.py @@ -5,11 +5,10 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -# pylint: disable=protected-access from collections.abc import MutableMapping from io import IOBase import json -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload from azure.core import PipelineClient from azure.core.exceptions import ( @@ -35,7 +34,7 @@ JSON = MutableMapping[str, Any] T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -156,9 +155,9 @@ def _execute( ) -> _models._models.QueryResults: """Execute an Analytics query. - Executes an Analytics query for data. - `Here `_ - is an example for using POST with an Analytics query. + Executes an Analytics query for data. `Here + `_ is an example for + using POST with an Analytics query. :param workspace_id: Primary Workspace ID of the query. 
This is the Workspace ID from the Properties @@ -211,6 +210,7 @@ def _execute( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -225,11 +225,14 @@ def _execute( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models._models.ErrorResponse, response.json()) + error = _failsafe_deserialize( + _models._models.ErrorResponse, # pylint: disable=protected-access + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize( _models._models.QueryResults, response.json() # pylint: disable=protected-access @@ -282,9 +285,9 @@ def _execute_with_resource_id( ) -> _models._models.QueryResults: """Execute an Analytics query using resource ID. - Executes an Analytics query for data in the context of a resource. - `Here `_ - is an example for using POST with an Analytics query. + Executes an Analytics query for data in the context of a resource. `Here + `_ is an + example for using POST with an Analytics query. :param resource_id: The identifier of the resource. Required. 
:type resource_id: str @@ -335,6 +338,7 @@ def _execute_with_resource_id( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -349,11 +353,14 @@ def _execute_with_resource_id( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models._models.ErrorResponse, response.json()) + error = _failsafe_deserialize( + _models._models.ErrorResponse, # pylint: disable=protected-access + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize( _models._models.QueryResults, response.json() # pylint: disable=protected-access @@ -383,9 +390,9 @@ def _batch( ) -> _models._models.BatchResponse: """Execute a batch of Analytics queries. - Executes a batch of Analytics queries for data. - `Here `_ - is an example for using POST with an Analytics query. + Executes a batch of Analytics queries for data. `Here + `_ is an example for + using POST with an Analytics query. :param body: The batch request body. Is one of the following types: BatchRequest, JSON, IO[bytes] Required. 
@@ -427,6 +434,7 @@ def _batch( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -441,11 +449,14 @@ def _batch( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models._models.ErrorResponse, response.json()) + error = _failsafe_deserialize( + _models._models.ErrorResponse, # pylint: disable=protected-access + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize( _models._models.BatchResponse, response.json() # pylint: disable=protected-access diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/model_base.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/model_base.py index 49d5c7259389..7b7f8ba67b53 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/model_base.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/model_base.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -29,6 +29,7 @@ from azure.core import CaseInsensitiveEnumMeta from azure.core.pipeline import PipelineResponse from azure.core.serialization import _Null +from azure.core.rest import HttpResponse _LOGGER = logging.getLogger(__name__) @@ -36,6 +37,7 @@ TZ_UTC = timezone.utc _T = typing.TypeVar("_T") +_NONE_TYPE = type(None) def _timedelta_as_isostr(td: timedelta) -> str: @@ -170,6 +172,21 @@ def default(self, o): # pylint: disable=too-many-return-statements r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" ) +_ARRAY_ENCODE_MAPPING = { + "pipeDelimited": "|", + "spaceDelimited": " ", + "commaDelimited": ",", + "newlineDelimited": "\n", +} + + +def _deserialize_array_encoded(delimit: str, attr): + if isinstance(attr, str): + if attr == "": + return [] + return attr.split(delimit) + return attr + def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: """Deserialize ISO-8601 formatted string into Datetime object. @@ -201,7 +218,7 @@ def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: test_utc = date_obj.utctimetuple() if test_utc.tm_year > 9999 or test_utc.tm_year < 1: raise OverflowError("Hit max or min date") - return date_obj + return date_obj # type: ignore[no-any-return] def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: @@ -255,7 +272,7 @@ def _deserialize_time(attr: typing.Union[str, time]) -> time: """ if isinstance(attr, time): return attr - return isodate.parse_time(attr) + return isodate.parse_time(attr) # type: ignore[no-any-return] def _deserialize_bytes(attr): @@ -314,6 +331,8 @@ def _deserialize_int_as_str(attr): def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): if annotation is int and rf and rf._format == "str": return _deserialize_int_as_str + if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING: + return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format]) if rf 
and rf._format: return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore @@ -345,16 +364,46 @@ def _get_model(module_name: str, model_name: str): class _MyMutableMapping(MutableMapping[str, typing.Any]): - def __init__(self, data: typing.Dict[str, typing.Any]) -> None: + def __init__(self, data: dict[str, typing.Any]) -> None: self._data = data def __contains__(self, key: typing.Any) -> bool: return key in self._data def __getitem__(self, key: str) -> typing.Any: + # If this key has been deserialized (for mutable types), we need to handle serialization + if hasattr(self, "_attr_to_rest_field"): + cache_attr = f"_deserialized_{key}" + if hasattr(self, cache_attr): + rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key) + if rf: + value = self._data.get(key) + if isinstance(value, (dict, list, set)): + # For mutable types, serialize and return + # But also update _data with serialized form and clear flag + # so mutations via this returned value affect _data + serialized = _serialize(value, rf._format) + # If serialized form is same type (no transformation needed), + # return _data directly so mutations work + if isinstance(serialized, type(value)) and serialized == value: + return self._data.get(key) + # Otherwise return serialized copy and clear flag + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + # Store serialized form back + self._data[key] = serialized + return serialized return self._data.__getitem__(key) def __setitem__(self, key: str, value: typing.Any) -> None: + # Clear any cached deserialized value when setting through dictionary access + cache_attr = f"_deserialized_{key}" + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass self._data.__setitem__(key, value) def __delitem__(self, key: str) -> None: @@ -425,7 +474,7 @@ def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: return self._data.pop(key) return 
self._data.pop(key, default) - def popitem(self) -> typing.Tuple[str, typing.Any]: + def popitem(self) -> tuple[str, typing.Any]: """ Removes and returns some (key, value) pair :returns: The (key, value) pair. @@ -466,6 +515,8 @@ def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: return self._data.setdefault(key, default) def __eq__(self, other: typing.Any) -> bool: + if isinstance(other, _MyMutableMapping): + return self._data == other._data try: other_model = self.__class__(other) except Exception: @@ -482,6 +533,8 @@ def _is_model(obj: typing.Any) -> bool: def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements if isinstance(o, list): + if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o): + return _ARRAY_ENCODE_MAPPING[format].join(o) return [_serialize(x, format) for x in o] if isinstance(o, dict): return {k: _serialize(v, format) for k, v in o.items()} @@ -513,9 +566,7 @@ def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-m return o -def _get_rest_field( - attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str -) -> typing.Optional["_RestField"]: +def _get_rest_field(attr_to_rest_field: dict[str, "_RestField"], rest_name: str) -> typing.Optional["_RestField"]: try: return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) except StopIteration: @@ -538,7 +589,7 @@ class Model(_MyMutableMapping): _is_model = True # label whether current class's _attr_to_rest_field has been calculated # could not see _attr_to_rest_field directly because subclass inherits it from parent class - _calculated: typing.Set[str] = set() + _calculated: set[str] = set() def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: class_name = self.__class__.__name__ @@ -579,6 +630,9 @@ def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: if len(items) > 0: existed_attr_keys.append(xml_name) 
dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + elif not rf._is_optional: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = [] continue # text element is primitive type @@ -623,7 +677,7 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order - attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property + attr_to_rest_field: dict[str, _RestField] = { # map attribute name to rest_field property k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") } annotations = { @@ -638,7 +692,7 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) if not rf._rest_name_input: rf._rest_name_input = attr - cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items()) cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") return super().__new__(cls) @@ -680,7 +734,7 @@ def _deserialize(cls, data, exist_discriminators): mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member return mapped_cls._deserialize(data, exist_discriminators) - def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]: + def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]: """Return a dict that can be turned into json using json.dump. :keyword bool exclude_readonly: Whether to remove the readonly properties. 
@@ -740,7 +794,7 @@ def _deserialize_with_union(deserializers, obj): def _deserialize_dict( value_deserializer: typing.Optional[typing.Callable], module: typing.Optional[str], - obj: typing.Dict[typing.Any, typing.Any], + obj: dict[typing.Any, typing.Any], ): if obj is None: return obj @@ -750,7 +804,7 @@ def _deserialize_dict( def _deserialize_multiple_sequence( - entry_deserializers: typing.List[typing.Optional[typing.Callable]], + entry_deserializers: list[typing.Optional[typing.Callable]], module: typing.Optional[str], obj, ): @@ -759,6 +813,14 @@ def _deserialize_multiple_sequence( return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) +def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool: + return ( + isinstance(deserializer, functools.partial) + and isinstance(deserializer.args[0], functools.partial) + and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable + ) + + def _deserialize_sequence( deserializer: typing.Optional[typing.Callable], module: typing.Optional[str], @@ -768,17 +830,30 @@ def _deserialize_sequence( return obj if isinstance(obj, ET.Element): obj = list(obj) + + # encoded string may be deserialized to sequence + if isinstance(obj, str) and isinstance(deserializer, functools.partial): + # for list[str] + if _is_array_encoded_deserializer(deserializer): + return deserializer(obj) + + # for list[Union[...]] + if isinstance(deserializer.args[0], list): + for sub_deserializer in deserializer.args[0]: + if _is_array_encoded_deserializer(sub_deserializer): + return sub_deserializer(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) -def _sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.Any]: +def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]: return sorted( types, key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", 
"int", "bool"), ) -def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches annotation: typing.Any, module: typing.Optional[str], rf: typing.Optional["_RestField"] = None, @@ -818,16 +893,18 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur # is it optional? try: - if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if any(a is _NONE_TYPE for a in annotation.__args__): # pyright: ignore + if rf: + rf._is_optional = True if len(annotation.__args__) <= 2: # pyright: ignore if_obj_deserializer = _get_deserialize_callable_from_annotation( - next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf # pyright: ignore ) return functools.partial(_deserialize_with_optional, if_obj_deserializer) # the type is Optional[Union[...]], we need to remove the None type from the Union annotation_copy = copy.copy(annotation) - annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE] # pyright: ignore return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) except AttributeError: pass @@ -843,7 +920,10 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur return functools.partial(_deserialize_with_union, deserializers) try: - if annotation._name == "Dict": # pyright: ignore + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() == "dict": value_deserializer = _get_deserialize_callable_from_annotation( annotation.__args__[1], module, rf # pyright: ignore ) @@ -856,7 
+936,10 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur except (AttributeError, IndexError): pass try: - if annotation._name in ["List", "Set", "Tuple", "Sequence"]: # pyright: ignore + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() in ["list", "set", "tuple", "sequence"]: if len(annotation.__args__) > 1: # pyright: ignore entry_deserializers = [ _get_deserialize_callable_from_annotation(dt, module, rf) @@ -905,16 +988,20 @@ def _deserialize_with_callable( return float(value.text) if value.text else None if deserializer is bool: return value.text == "true" if value.text else None + if deserializer and deserializer in _DESERIALIZE_MAPPING.values(): + return deserializer(value.text) if value.text else None + if deserializer and deserializer in _DESERIALIZE_MAPPING_WITHFORMAT.values(): + return deserializer(value.text) if value.text else None if deserializer is None: return value if deserializer in [int, float, bool]: return deserializer(value) if isinstance(deserializer, CaseInsensitiveEnumMeta): try: - return deserializer(value) + return deserializer(value.text if isinstance(value, ET.Element) else value) except ValueError: # for unknown value, return raw value - return value + return value.text if isinstance(value, ET.Element) else value if isinstance(deserializer, type) and issubclass(deserializer, Model): return deserializer._deserialize(value, []) return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) @@ -940,14 +1027,14 @@ def _deserialize( def _failsafe_deserialize( deserializer: typing.Any, - value: typing.Any, + response: HttpResponse, module: typing.Optional[str] = None, rf: typing.Optional["_RestField"] = None, format: typing.Optional[str] = None, ) -> typing.Any: try: - return _deserialize(deserializer, value, module, rf, format) - except DeserializationError: + return 
_deserialize(deserializer, response.json(), module, rf, format) + except Exception: # pylint: disable=broad-except _LOGGER.warning( "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True ) @@ -956,17 +1043,18 @@ def _failsafe_deserialize( def _failsafe_deserialize_xml( deserializer: typing.Any, - value: typing.Any, + response: HttpResponse, ) -> typing.Any: try: - return _deserialize_xml(deserializer, value) - except DeserializationError: + return _deserialize_xml(deserializer, response.text()) + except Exception: # pylint: disable=broad-except _LOGGER.warning( "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True ) return None +# pylint: disable=too-many-instance-attributes class _RestField: def __init__( self, @@ -974,11 +1062,11 @@ def __init__( name: typing.Optional[str] = None, type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin is_discriminator: bool = False, - visibility: typing.Optional[typing.List[str]] = None, + visibility: typing.Optional[list[str]] = None, default: typing.Any = _UNSET, format: typing.Optional[str] = None, is_multipart_file_input: bool = False, - xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, ): self._type = type self._rest_name_input = name @@ -986,6 +1074,7 @@ def __init__( self._is_discriminator = is_discriminator self._visibility = visibility self._is_model = False + self._is_optional = False self._default = default self._format = format self._is_multipart_file_input = is_multipart_file_input @@ -993,7 +1082,11 @@ def __init__( @property def _class_type(self) -> typing.Any: - return getattr(self._type, "args", [None])[0] + result = getattr(self._type, "args", [None])[0] + # type may be wrapped by nested functools.partial so we need to check for that + if isinstance(result, functools.partial): + return getattr(result, "args", [None])[0] + return result 
@property def _rest_name(self) -> str: @@ -1004,14 +1097,37 @@ def _rest_name(self) -> str: def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin # by this point, type and rest_name will have a value bc we default # them in __new__ of the Model class - item = obj.get(self._rest_name) + # Use _data.get() directly to avoid triggering __getitem__ which clears the cache + item = obj._data.get(self._rest_name) if item is None: return item if self._is_model: return item - return _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, we want mutations to directly affect _data + # Check if we've already deserialized this value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + # Return the value from _data directly (it's been deserialized in place) + return obj._data.get(self._rest_name) + + deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, store the deserialized value back in _data + # so mutations directly affect _data + if isinstance(deserialized, (dict, list, set)): + obj._data[self._rest_name] = deserialized + object.__setattr__(obj, cache_attr, True) # Mark as deserialized + return deserialized + + return deserialized def __set__(self, obj: Model, value) -> None: + # Clear the cached deserialized object when setting a new value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + object.__delattr__(obj, cache_attr) + if value is None: # we want to wipe out entries if users set attr to None try: @@ -1036,11 +1152,11 @@ def rest_field( *, name: typing.Optional[str] = None, type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin - visibility: typing.Optional[typing.List[str]] = None, + visibility: typing.Optional[list[str]] = None, default: typing.Any = _UNSET, format: typing.Optional[str] = None, is_multipart_file_input: bool = False, - xml: 
typing.Optional[typing.Dict[str, typing.Any]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, ) -> typing.Any: return _RestField( name=name, @@ -1057,8 +1173,8 @@ def rest_discriminator( *, name: typing.Optional[str] = None, type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin - visibility: typing.Optional[typing.List[str]] = None, - xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + visibility: typing.Optional[list[str]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, ) -> typing.Any: return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) @@ -1077,9 +1193,9 @@ def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: def _get_element( o: typing.Any, exclude_readonly: bool = False, - parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None, + parent_meta: typing.Optional[dict[str, typing.Any]] = None, wrapped_element: typing.Optional[ET.Element] = None, -) -> typing.Union[ET.Element, typing.List[ET.Element]]: +) -> typing.Union[ET.Element, list[ET.Element]]: if _is_model(o): model_meta = getattr(o, "_xml", {}) @@ -1168,7 +1284,7 @@ def _get_element( def _get_wrapped_element( v: typing.Any, exclude_readonly: bool, - meta: typing.Optional[typing.Dict[str, typing.Any]], + meta: typing.Optional[dict[str, typing.Any]], ) -> ET.Element: wrapped_element = _create_xml_element( meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None @@ -1179,7 +1295,7 @@ def _get_wrapped_element( _get_element(v, exclude_readonly, meta, wrapped_element) else: wrapped_element.text = _get_primitive_type_value(v) - return wrapped_element + return wrapped_element # type: ignore[no-any-return] def _get_primitive_type_value(v) -> str: @@ -1192,7 +1308,9 @@ def _get_primitive_type_value(v) -> str: return str(v) -def _create_xml_element(tag, prefix=None, ns=None): +def _create_xml_element( + tag: typing.Any, prefix: 
typing.Optional[str] = None, ns: typing.Optional[str] = None +) -> ET.Element: if prefix and ns: ET.register_namespace(prefix, ns) if ns: @@ -1211,7 +1329,7 @@ def _deserialize_xml( def _convert_element(e: ET.Element): # dict case if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: - dict_result: typing.Dict[str, typing.Any] = {} + dict_result: dict[str, typing.Any] = {} for child in e: if dict_result.get(child.tag) is not None: if isinstance(dict_result[child.tag], list): @@ -1224,7 +1342,7 @@ def _convert_element(e: ET.Element): return dict_result # array case if len(e) > 0: - array_result: typing.List[typing.Any] = [] + array_result: list[typing.Any] = [] for child in e: array_result.append(_convert_element(child)) return array_result diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/serialization.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/serialization.py index eb86ea23c965..81ec1de5922b 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/serialization.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/serialization.py @@ -21,7 +21,6 @@ import sys import codecs from typing import ( - Dict, Any, cast, Optional, @@ -31,7 +30,6 @@ Mapping, Callable, MutableMapping, - List, ) try: @@ -229,12 +227,12 @@ class Model: serialization and deserialization. 
""" - _subtype_map: Dict[str, Dict[str, Any]] = {} - _attribute_map: Dict[str, Dict[str, Any]] = {} - _validation: Dict[str, Dict[str, Any]] = {} + _subtype_map: dict[str, dict[str, Any]] = {} + _attribute_map: dict[str, dict[str, Any]] = {} + _validation: dict[str, dict[str, Any]] = {} def __init__(self, **kwargs: Any) -> None: - self.additional_properties: Optional[Dict[str, Any]] = {} + self.additional_properties: Optional[dict[str, Any]] = {} for k in kwargs: # pylint: disable=consider-using-dict-items if k not in self._attribute_map: _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) @@ -311,7 +309,7 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: def as_dict( self, keep_readonly: bool = True, - key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer, **kwargs: Any ) -> JSON: """Return a dict that can be serialized using json.dump. @@ -380,7 +378,7 @@ def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: def from_dict( cls, data: Any, - key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None, content_type: Optional[str] = None, ) -> Self: """Parse a dict using given key extractor return a model. 
@@ -414,7 +412,7 @@ def _flatten_subtype(cls, key, objects): return {} result = dict(cls._subtype_map[key]) for valuetype in cls._subtype_map[key].values(): - result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access + result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access return result @classmethod @@ -528,7 +526,7 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: "[]": self.serialize_iter, "{}": self.serialize_dict, } - self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.dependencies: dict[str, type] = dict(classes) if classes else {} self.key_transformer = full_restapi_key_transformer self.client_side_validation = True @@ -579,7 +577,7 @@ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, to if attr_name == "additional_properties" and attr_desc["key"] == "": if target_obj.additional_properties is not None: - serialized.update(target_obj.additional_properties) + serialized |= target_obj.additional_properties continue try: @@ -789,7 +787,7 @@ def serialize_data(self, data, data_type, **kwargs): # If dependencies is empty, try with current data class # It has to be a subclass of Enum anyway - enum_type = self.dependencies.get(data_type, data.__class__) + enum_type = self.dependencies.get(data_type, cast(type, data.__class__)) if issubclass(enum_type, Enum): return Serializer.serialize_enum(data, enum_obj=enum_type) @@ -823,13 +821,20 @@ def serialize_basic(cls, data, data_type, **kwargs): :param str data_type: Type of object in the iterable. :rtype: str, int, float, bool :return: serialized object + :raises TypeError: raise if data_type is not one of str, int, float, bool. 
""" custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(data) + if data_type == "float": + return float(data) + if data_type == "bool": + return bool(data) + raise TypeError("Unknown basic data type: {}".format(data_type)) @classmethod def serialize_unicode(cls, data): @@ -1184,7 +1189,7 @@ def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argumen while "." in key: # Need the cast, as for some reasons "split" is typed as list[str | Any] - dict_keys = cast(List[str], _FLATTEN.split(key)) + dict_keys = cast(list[str], _FLATTEN.split(key)) if len(dict_keys) == 1: key = _decode_attribute_map_key(dict_keys[0]) break @@ -1386,7 +1391,7 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: "duration": (isodate.Duration, datetime.timedelta), "iso-8601": (datetime.datetime), } - self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.dependencies: dict[str, type] = dict(classes) if classes else {} self.key_extractors = [rest_key_extractor, xml_key_extractor] # Additional properties only works if the "rest_key_extractor" is used to # extract the keys. Making it to work whatever the key extractor is too much @@ -1759,7 +1764,7 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return :param str data_type: deserialization data type. :return: Deserialized basic type. :rtype: str, int, float or bool - :raises TypeError: if string format is not valid. + :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool. """ # If we're here, data is supposed to be a basic type. 
# If it's still an XML node, take the text @@ -1785,7 +1790,11 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(attr) + if data_type == "float": + return float(attr) + raise TypeError("Unknown basic data type: {}".format(data_type)) @staticmethod def deserialize_unicode(data): diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_client.py index e3975d45c2fc..81cb7a07a203 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_client.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_client.py @@ -16,7 +16,7 @@ from .._utils.serialization import Deserializer, Serializer from ._configuration import MonitorQueryLogsClientConfiguration -from ._operations._operations import _MonitorQueryLogsClientOperationsMixin +from ._operations import _MonitorQueryLogsClientOperationsMixin if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -30,8 +30,8 @@ class MonitorQueryLogsClient(_MonitorQueryLogsClientOperationsMixin): :keyword endpoint: The Log Analytics service endpoint. Default value is "https://api.loganalytics.io". :paramtype endpoint: str - :keyword api_version: The service API version. Known values are "v1" and None. Default value is - "v1". Note that overriding this default value may result in unsupported behavior. + :keyword api_version: The service API version. Known values are "v1". Default value is "v1". + Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str or ~azure.monitor.query.models.Versions """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_configuration.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_configuration.py index d9eadf4816e5..29c9497eda74 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_configuration.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_configuration.py @@ -27,8 +27,8 @@ class MonitorQueryLogsClientConfiguration: # pylint: disable=too-many-instance- :param endpoint: The Log Analytics service endpoint. Default value is "https://api.loganalytics.io". :type endpoint: str - :keyword api_version: The service API version. Known values are "v1" and None. Default value is - "v1". Note that overriding this default value may result in unsupported behavior. + :keyword api_version: The service API version. Known values are "v1". Default value is "v1". + Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str or ~azure.monitor.query.models.Versions """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/__init__.py index 933fcd7d1b55..9e1931184851 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/__init__.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/__init__.py @@ -12,6 +12,7 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import +from ._operations import _MonitorQueryLogsClientOperationsMixin # type: ignore # pylint: disable=unused-import from ._patch import __all__ as _patch_all from ._patch import * diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/_operations.py index 420c5456135a..777b458fb4f2 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/_operations.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/_operations.py @@ -6,11 +6,10 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -# pylint: disable=protected-access from collections.abc import MutableMapping from io import IOBase import json -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload from azure.core import AsyncPipelineClient from azure.core.exceptions import ( @@ -40,7 +39,7 @@ JSON = MutableMapping[str, Any] T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] class _MonitorQueryLogsClientOperationsMixin( @@ -89,9 +88,9 @@ async def _execute( ) -> _models._models.QueryResults: """Execute an Analytics query. - Executes an Analytics query for data. - `Here `_ - is an example for using POST with an Analytics query. + Executes an Analytics query for data. `Here + `_ is an example for + using POST with an Analytics query. :param workspace_id: Primary Workspace ID of the query. 
This is the Workspace ID from the Properties @@ -144,6 +143,7 @@ async def _execute( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -158,11 +158,14 @@ async def _execute( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models._models.ErrorResponse, response.json()) + error = _failsafe_deserialize( + _models._models.ErrorResponse, # pylint: disable=protected-access + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize( _models._models.QueryResults, response.json() # pylint: disable=protected-access @@ -215,9 +218,9 @@ async def _execute_with_resource_id( ) -> _models._models.QueryResults: """Execute an Analytics query using resource ID. - Executes an Analytics query for data in the context of a resource. - `Here `_ - is an example for using POST with an Analytics query. + Executes an Analytics query for data in the context of a resource. `Here + `_ is an + example for using POST with an Analytics query. :param resource_id: The identifier of the resource. Required. 
:type resource_id: str @@ -268,6 +271,7 @@ async def _execute_with_resource_id( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -282,11 +286,14 @@ async def _execute_with_resource_id( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models._models.ErrorResponse, response.json()) + error = _failsafe_deserialize( + _models._models.ErrorResponse, # pylint: disable=protected-access + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize( _models._models.QueryResults, response.json() # pylint: disable=protected-access @@ -316,9 +323,9 @@ async def _batch( ) -> _models._models.BatchResponse: """Execute a batch of Analytics queries. - Executes a batch of Analytics queries for data. - `Here `_ - is an example for using POST with an Analytics query. + Executes a batch of Analytics queries for data. `Here + `_ is an example for + using POST with an Analytics query. :param body: The batch request body. Is one of the following types: BatchRequest, JSON, IO[bytes] Required. 
@@ -360,6 +367,7 @@ async def _batch( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -374,11 +382,14 @@ async def _batch( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models._models.ErrorResponse, response.json()) + error = _failsafe_deserialize( + _models._models.ErrorResponse, # pylint: disable=protected-access + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize( _models._models.BatchResponse, response.json() # pylint: disable=protected-access diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/models/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/models/__init__.py index 76cc9379a831..6ce8f01b3aaa 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/models/__init__.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/models/__init__.py @@ -5,7 +5,7 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -# pylint: disable=wrong-import-position +# pylint: disable=wrong-import-position,unused-import from typing import TYPE_CHECKING @@ -13,15 +13,13 @@ from ._patch import * # pylint: disable=unused-wildcard-import -from ._enums import ( # type: ignore - _ColumnType, -) +from . import _models + +from . 
import _enums from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk -__all__ = [ - "_ColumnType", -] +__all__ = [] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_enums.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_enums.py index d42e92a47163..e5a1ed462e4b 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_enums.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_enums.py @@ -14,25 +14,25 @@ class _ColumnType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The data type of a column.""" BOOL = "bool" - """Boolean data type""" + """Boolean data type.""" DATETIME = "datetime" - """DateTime data type""" + """DateTime data type.""" DYNAMIC = "dynamic" - """Dynamic data type""" + """Dynamic data type.""" INT = "int" - """Integer data type""" + """Integer data type.""" LONG = "long" - """Long integer data type""" + """Long integer data type.""" REAL = "real" - """Real/floating point data type""" + """Real/floating point data type.""" STRING = "string" - """String data type""" + """String data type.""" GUID = "guid" - """GUID data type""" + """GUID data type.""" DECIMAL = "decimal" - """Decimal data type""" + """Decimal data type.""" TIMESPAN = "timespan" - """Timespan data type""" + """Timespan data type.""" class Versions(str, Enum, metaclass=CaseInsensitiveEnumMeta): diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_models.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_models.py index 99472104c0c5..a890524af094 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_models.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_models.py @@ -7,7 +7,7 @@ # -------------------------------------------------------------------------- # pylint: disable=useless-super-delegation 
-from typing import Any, Dict, List, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload +from typing import Any, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload from .._utils.model_base import Model as _Model, rest_field @@ -20,11 +20,10 @@ class BatchQueryRequest(_Model): :ivar id: Unique ID corresponding to each request in the batch. Required. :vartype id: str - :ivar headers: Headers of the request. Can use prefer header to set server timeout and to - query statistics and visualization information. + :ivar headers: Headers of the request. Can use prefer header to set server timeout and to query + statistics and visualization information. :vartype headers: dict[str, str] - :ivar body: The Analytics query. Learn more about the - `Analytics query syntax + :ivar body: The Analytics query. Learn more about the `Analytics query syntax `_. Required. :vartype body: ~azure.monitor.query.models._models.QueryBody @@ -33,19 +32,17 @@ class BatchQueryRequest(_Model): :ivar method: The method of a single request in a batch. Required. Default value is "POST". :vartype method: str :ivar workspace: Primary Workspace ID of the query. This is the Workspace ID from the - Properties - blade in the Azure portal. Required. + Properties blade in the Azure portal. Required. :vartype workspace: str """ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Unique ID corresponding to each request in the batch. Required.""" - headers: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Headers of the request. Can use prefer header to set server timeout and to - query statistics and visualization information.""" + headers: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Headers of the request. 
Can use prefer header to set server timeout and to query statistics and + visualization information.""" body: "_models._models.QueryBody" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The Analytics query. Learn more about the - `Analytics query syntax + """The Analytics query. Learn more about the `Analytics query syntax `_. Required.""" path: Literal["/query"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -53,8 +50,8 @@ class BatchQueryRequest(_Model): method: Literal["POST"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The method of a single request in a batch. Required. Default value is \"POST\".""" workspace: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Primary Workspace ID of the query. This is the Workspace ID from the Properties - blade in the Azure portal. Required.""" + """Primary Workspace ID of the query. This is the Workspace ID from the Properties blade in the + Azure portal. Required.""" @overload def __init__( @@ -63,7 +60,7 @@ def __init__( id: str, # pylint: disable=redefined-builtin body: "_models._models.QueryBody", workspace: str, - headers: Optional[Dict[str, str]] = None, + headers: Optional[dict[str, str]] = None, ) -> None: ... 
@overload @@ -100,7 +97,7 @@ class BatchQueryResponse(_Model): visibility=["read", "create", "update", "delete", "query"] ) """Contains the tables, columns & rows resulting from a query.""" - headers: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + headers: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Dictionary of .""" @overload @@ -110,7 +107,7 @@ def __init__( id: Optional[str] = None, # pylint: disable=redefined-builtin status: Optional[int] = None, body: Optional["_models._models.BatchQueryResults"] = None, - headers: Optional[Dict[str, str]] = None, + headers: Optional[dict[str, str]] = None, ) -> None: ... @overload @@ -137,13 +134,13 @@ class BatchQueryResults(_Model): :vartype error: ~azure.monitor.query.models._models.ErrorInfo """ - tables: Optional[List["_models._models.Table"]] = rest_field( + tables: Optional[list["_models._models.Table"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) """The results of the query in tabular format.""" - statistics: Optional[Dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + statistics: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Statistics represented in JSON format.""" - render: Optional[Dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + render: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Visualization data in JSON format.""" error: Optional["_models._models.ErrorInfo"] = rest_field( visibility=["read", "create", "update", "delete", "query"] @@ -154,9 +151,9 @@ class BatchQueryResults(_Model): def __init__( self, *, - tables: Optional[List["_models._models.Table"]] = None, - statistics: Optional[Dict[str, Any]] = None, - render: Optional[Dict[str, Any]] = None, + tables: 
Optional[list["_models._models.Table"]] = None, + statistics: Optional[dict[str, Any]] = None, + render: Optional[dict[str, Any]] = None, error: Optional["_models._models.ErrorInfo"] = None, ) -> None: ... @@ -178,7 +175,7 @@ class BatchRequest(_Model): :vartype requests: list[~azure.monitor.query.models._models.BatchQueryRequest] """ - requests: List["_models._models.BatchQueryRequest"] = rest_field( + requests: list["_models._models.BatchQueryRequest"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) """An single request in a batch. Required.""" @@ -187,7 +184,7 @@ class BatchRequest(_Model): def __init__( self, *, - requests: List["_models._models.BatchQueryRequest"], + requests: list["_models._models.BatchQueryRequest"], ) -> None: ... @overload @@ -208,7 +205,7 @@ class BatchResponse(_Model): :vartype responses: list[~azure.monitor.query.models._models.BatchQueryResponse] """ - responses: Optional[List["_models._models.BatchQueryResponse"]] = rest_field( + responses: Optional[list["_models._models.BatchQueryResponse"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) """An array of responses corresponding to each individual request in a batch.""" @@ -217,7 +214,7 @@ class BatchResponse(_Model): def __init__( self, *, - responses: Optional[List["_models._models.BatchQueryResponse"]] = None, + responses: Optional[list["_models._models.BatchQueryResponse"]] = None, ) -> None: ... @overload @@ -243,7 +240,9 @@ class Column(_Model): name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of this column. Required.""" - type: Union[str, "_models._ColumnType"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + type: Union[str, "_models._enums._ColumnType"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) """The data type of this column. Required. 
Known values are: \"bool\", \"datetime\", \"dynamic\", \"int\", \"long\", \"real\", \"string\", \"guid\", \"decimal\", and \"timespan\".""" @@ -252,7 +251,7 @@ def __init__( self, *, name: str, - type: Union[str, "_models._ColumnType"], + type: Union[str, "_models._enums._ColumnType"], ) -> None: ... @overload @@ -292,9 +291,9 @@ class ErrorDetail(_Model): """Indicates which property in the request is responsible for the error.""" value: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Indicates which value in 'target' is responsible for the error.""" - resources: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + resources: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Indicates resources which were responsible for the error.""" - additional_properties: Optional[Dict[str, Any]] = rest_field( + additional_properties: Optional[dict[str, Any]] = rest_field( name="additionalProperties", visibility=["read", "create", "update", "delete", "query"] ) """Additional properties that can be provided on the error details object.""" @@ -307,8 +306,8 @@ def __init__( message: str, target: Optional[str] = None, value: Optional[str] = None, - resources: Optional[List[str]] = None, - additional_properties: Optional[Dict[str, Any]] = None, + resources: Optional[list[str]] = None, + additional_properties: Optional[dict[str, Any]] = None, ) -> None: ... @overload @@ -342,7 +341,7 @@ class ErrorInfo(_Model): """A machine readable error code. Required.""" message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """A human readable error message. 
Required.""" - details: Optional[List["_models._models.ErrorDetail"]] = rest_field( + details: Optional[list["_models._models.ErrorDetail"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) """error details.""" @@ -350,7 +349,7 @@ class ErrorInfo(_Model): visibility=["read", "create", "update", "delete", "query"] ) """Inner error details if they exist.""" - additional_properties: Optional[Dict[str, Any]] = rest_field( + additional_properties: Optional[dict[str, Any]] = rest_field( name="additionalProperties", visibility=["read", "create", "update", "delete", "query"] ) """Additional properties that can be provided on the error info object.""" @@ -361,9 +360,9 @@ def __init__( *, code: str, message: str, - details: Optional[List["_models._models.ErrorDetail"]] = None, + details: Optional[list["_models._models.ErrorDetail"]] = None, innererror: Optional["_models._models.ErrorInfo"] = None, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[dict[str, Any]] = None, ) -> None: ... @overload @@ -406,15 +405,13 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class QueryBody(_Model): - """The Analytics query. Learn more about the - `Analytics query syntax + """The Analytics query. Learn more about the `Analytics query syntax `_. :ivar query: The query to execute. Required. :vartype query: str :ivar timespan: Optional. The timespan over which to query data. This is an ISO8601 time period - value. This timespan is applied in addition to any that are specified in the - query expression. + value. This timespan is applied in addition to any that are specified in the query expression. :vartype timespan: str :ivar workspaces: A list of workspaces to query in addition to the primary workspace. :vartype workspaces: list[str] @@ -423,10 +420,9 @@ class QueryBody(_Model): query: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The query to execute. 
Required.""" timespan: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional. The timespan over which to query data. This is an ISO8601 time period - value. This timespan is applied in addition to any that are specified in the - query expression.""" - workspaces: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional. The timespan over which to query data. This is an ISO8601 time period value. This + timespan is applied in addition to any that are specified in the query expression.""" + workspaces: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """A list of workspaces to query in addition to the primary workspace.""" @overload @@ -435,7 +431,7 @@ def __init__( *, query: str, timespan: Optional[str] = None, - workspaces: Optional[List[str]] = None, + workspaces: Optional[list[str]] = None, ) -> None: ... @overload @@ -462,11 +458,11 @@ class QueryResults(_Model): :vartype error: ~azure.monitor.query.models._models.ErrorInfo """ - tables: List["_models._models.Table"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + tables: list["_models._models.Table"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The results of the query in tabular format. 
Required.""" - statistics: Optional[Dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + statistics: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Statistics represented in JSON format.""" - render: Optional[Dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + render: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Visualization data in JSON format.""" error: Optional["_models._models.ErrorInfo"] = rest_field( visibility=["read", "create", "update", "delete", "query"] @@ -477,9 +473,9 @@ class QueryResults(_Model): def __init__( self, *, - tables: List["_models._models.Table"], - statistics: Optional[Dict[str, Any]] = None, - render: Optional[Dict[str, Any]] = None, + tables: list["_models._models.Table"], + statistics: Optional[dict[str, Any]] = None, + render: Optional[dict[str, Any]] = None, error: Optional["_models._models.ErrorInfo"] = None, ) -> None: ... @@ -507,9 +503,9 @@ class Table(_Model): name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the table. Required.""" - columns: List["_models._models.Column"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + columns: list["_models._models.Column"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The list of columns in this table. Required.""" - rows: List[List[Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + rows: list[list[Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The resulting rows from this query. Required.""" @overload @@ -517,8 +513,8 @@ def __init__( self, *, name: str, - columns: List["_models._models.Column"], - rows: List[List[Any]], + columns: list["_models._models.Column"], + rows: list[list[Any]], ) -> None: ... 
@overload diff --git a/sdk/monitor/azure-monitor-query/pyproject.toml b/sdk/monitor/azure-monitor-query/pyproject.toml index 0504960bccae..982fd2812d21 100644 --- a/sdk/monitor/azure-monitor-query/pyproject.toml +++ b/sdk/monitor/azure-monitor-query/pyproject.toml @@ -1,2 +1,64 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +[build-system] +requires = ["setuptools>=77.0.3", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "azure-monitor-query" +authors = [ + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +description = "Microsoft Corporation Azure Monitor Query Client Library for Python" +license = "MIT" +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +requires-python = ">=3.9" +keywords = ["azure", "azure sdk"] + +dependencies = [ + "isodate>=0.6.1", + "azure-core>=1.37.0", + "typing-extensions>=4.6.0", +] +dynamic = [ +"version", "readme" +] + +[project.urls] +repository = "https://github.com/Azure/azure-sdk-for-python" + +[tool.setuptools.dynamic] +version = {attr = "azure.monitor.query._version.VERSION"} +readme = {file = ["README.md", "CHANGELOG.md"], content-type = "text/markdown"} + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + 
"generated_tests*", + "samples*", + "generated_samples*", + "doc*", + "azure", + "azure.monitor", +] + +[tool.setuptools.package-data] +pytyped = ["py.typed"] + [tool.azure-sdk-conda] in_bundle = false diff --git a/sdk/monitor/azure-monitor-query/setup.py b/sdk/monitor/azure-monitor-query/setup.py deleted file mode 100644 index 7b75d47ded79..000000000000 --- a/sdk/monitor/azure-monitor-query/setup.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - - -import os -import re -from setuptools import setup, find_packages - - -PACKAGE_NAME = "azure-monitor-query" -PACKAGE_PPRINT_NAME = "Azure Monitor Query" -PACKAGE_NAMESPACE = "azure.monitor.query" - -# a.b.c => a/b/c -package_folder_path = PACKAGE_NAMESPACE.replace(".", "/") - -# Version extraction inspired from 'requests' -with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: - version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) - -if not version: - raise RuntimeError("Cannot find version information") - - -setup( - name=PACKAGE_NAME, - version=version, - description="Microsoft Corporation {} Client Library for Python".format(PACKAGE_PPRINT_NAME), - long_description=open("README.md", "r").read(), - long_description_content_type="text/markdown", - license="MIT License", - author="Microsoft Corporation", - author_email="azpysdkhelp@microsoft.com", - url="https://github.com/Azure/azure-sdk-for-python/tree/main/sdk", - keywords="azure, azure sdk", - classifiers=[ - "Development Status :: 5 - Production/Stable", - 
- "Programming Language :: Python", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "License :: OSI Approved :: MIT License", - ], - zip_safe=False, - packages=find_packages( - exclude=[ - "tests", - # Exclude packages that will be covered by PEP420 or nspkg - "azure", - "azure.monitor", - ] - ), - include_package_data=True, - package_data={ - "azure.monitor.query": ["py.typed"], - }, - install_requires=[ - "isodate>=0.6.1", - "azure-core>=1.30.0", - "typing-extensions>=4.6.0", - ], - python_requires=">=3.9", -) diff --git a/sdk/monitor/azure-monitor-query/tsp-location.yaml b/sdk/monitor/azure-monitor-query/tsp-location.yaml index 2d4f3eb22554..fa58fccb03b7 100644 --- a/sdk/monitor/azure-monitor-query/tsp-location.yaml +++ b/sdk/monitor/azure-monitor-query/tsp-location.yaml @@ -1,4 +1,4 @@ -directory: specification/monitor/Monitor.Query.Logs -commit: fa3a001758bd80c44ec4aaf7b387f5e8e24f8287 +directory: specification/monitor/data-plane/OperationalInsights +commit: 8cb88c73041a8f3e902d117c464ff3067e68864e repo: Azure/azure-rest-api-specs additionalDirectories: From 74dea04fb5d6e3c167b5a4eda5c48263ba5fe89e Mon Sep 17 00:00:00 2001 From: Paul Van Eck Date: Mon, 6 Apr 2026 22:39:10 +0000 Subject: [PATCH 2/2] Add changelog entry for core bump Signed-off-by: Paul Van Eck --- sdk/monitor/azure-monitor-query/CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sdk/monitor/azure-monitor-query/CHANGELOG.md b/sdk/monitor/azure-monitor-query/CHANGELOG.md index cd3603d82d64..8686f3d8337a 100644 --- a/sdk/monitor/azure-monitor-query/CHANGELOG.md +++ b/sdk/monitor/azure-monitor-query/CHANGELOG.md @@ -10,6 +10,8 @@ ### Other Changes +- Bump minimum dependency on `azure-core` to `>=1.37.0`. 
+ ## 2.0.0 (2025-07-30) ### Breaking Changes