diff --git a/sdk/durabletask/azure-mgmt-durabletask/CHANGELOG.md b/sdk/durabletask/azure-mgmt-durabletask/CHANGELOG.md
index 261a188dff1b..837e55f94f5a 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/CHANGELOG.md
+++ b/sdk/durabletask/azure-mgmt-durabletask/CHANGELOG.md
@@ -1,5 +1,31 @@
# Release History
+## 1.1.0 (2026-03-09)
+
+### Features Added
+
+ - Model `SchedulerProperties` added property `public_network_access`
+ - Model `SchedulerProperties` added property `private_endpoint_connections`
+ - Model `SchedulerPropertiesUpdate` added property `public_network_access`
+ - Added model `OptionalPropertiesUpdateableProperties`
+ - Added model `PrivateEndpoint`
+ - Added model `PrivateEndpointConnection`
+ - Added model `PrivateEndpointConnectionProperties`
+ - Added enum `PrivateEndpointConnectionProvisioningState`
+ - Added model `PrivateEndpointConnectionUpdate`
+ - Added enum `PrivateEndpointServiceConnectionStatus`
+ - Added model `PrivateLinkResourceProperties`
+ - Added model `PrivateLinkServiceConnectionState`
+ - Added enum `PublicNetworkAccess`
+ - Added model `SchedulerPrivateLinkResource`
+ - Operation group `SchedulersOperations` added method `begin_create_or_update_private_endpoint_connection`
+ - Operation group `SchedulersOperations` added method `begin_delete_private_endpoint_connection`
+ - Operation group `SchedulersOperations` added method `begin_update_private_endpoint_connection`
+ - Operation group `SchedulersOperations` added method `get_private_endpoint_connection`
+ - Operation group `SchedulersOperations` added method `get_private_link`
+ - Operation group `SchedulersOperations` added method `list_private_endpoint_connections`
+ - Operation group `SchedulersOperations` added method `list_private_links`
+
## 1.0.0 (2025-09-25)
### Features Added
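The 1.1.0 changelog above adds private endpoint connection and private link operations to `SchedulersOperations`. The sketch below is a minimal, hypothetical usage example of those new methods: the parameter names (`resource_group_name`, `scheduler_name`, `private_endpoint_connection_name`, `resource`) and the payload shape follow common ARM SDK conventions and are assumptions, not copied from the generated signatures.

```python
# Hypothetical sketch of the new private endpoint connection operations (1.1.0).
# Parameter names and payload shape are assumptions based on ARM SDK conventions.
from azure.identity import DefaultAzureCredential
from azure.mgmt.durabletask import DurableTaskMgmtClient
from azure.mgmt.durabletask.models import (
    PrivateEndpointConnection,
    PrivateEndpointConnectionProperties,
    PrivateLinkServiceConnectionState,
)

client = DurableTaskMgmtClient(DefaultAzureCredential(), subscription_id="<subscription-id>")

# Enumerate existing private endpoint connections on a scheduler.
for connection in client.schedulers.list_private_endpoint_connections(
    resource_group_name="my-rg", scheduler_name="my-scheduler"
):
    print(connection.name)

# Approve a pending connection (long-running operation, hence the begin_ prefix).
poller = client.schedulers.begin_create_or_update_private_endpoint_connection(
    resource_group_name="my-rg",
    scheduler_name="my-scheduler",
    private_endpoint_connection_name="my-connection",
    resource=PrivateEndpointConnection(
        properties=PrivateEndpointConnectionProperties(
            private_link_service_connection_state=PrivateLinkServiceConnectionState(
                status="Approved", description="Approved by admin"
            )
        )
    ),
)
approved = poller.result()
```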
diff --git a/sdk/durabletask/azure-mgmt-durabletask/_metadata.json b/sdk/durabletask/azure-mgmt-durabletask/_metadata.json
index 803fd52eca48..2a003262371c 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/_metadata.json
+++ b/sdk/durabletask/azure-mgmt-durabletask/_metadata.json
@@ -1,7 +1,10 @@
{
- "apiVersion": "2025-11-01",
- "commit": "688609bf18c3978794a0c5ca31557c286c623b44",
+ "apiVersion": "2026-02-01",
+ "apiVersions": {
+ "Microsoft.DurableTask": "2026-02-01"
+ },
+ "commit": "a86317f00c71ef51aab7f78e6f4a2be3cf58f2cd",
"repository_url": "https://github.com/Azure/azure-rest-api-specs",
"typespec_src": "specification/durabletask/DurableTask.Management",
- "emitterVersion": "0.51.2"
+ "emitterVersion": "0.60.2"
}
\ No newline at end of file
diff --git a/sdk/durabletask/azure-mgmt-durabletask/apiview-properties.json b/sdk/durabletask/azure-mgmt-durabletask/apiview-properties.json
index 2b7c39917921..529451e3fd12 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/apiview-properties.json
+++ b/sdk/durabletask/azure-mgmt-durabletask/apiview-properties.json
@@ -6,13 +6,21 @@
"azure.mgmt.durabletask.models.ErrorResponse": "Azure.ResourceManager.CommonTypes.ErrorResponse",
"azure.mgmt.durabletask.models.Operation": "Azure.ResourceManager.CommonTypes.Operation",
"azure.mgmt.durabletask.models.OperationDisplay": "Azure.ResourceManager.CommonTypes.OperationDisplay",
+ "azure.mgmt.durabletask.models.OptionalPropertiesUpdateableProperties": "TypeSpec.OptionalProperties",
+ "azure.mgmt.durabletask.models.PrivateEndpoint": "Azure.ResourceManager.CommonTypes.PrivateEndpoint",
"azure.mgmt.durabletask.models.Resource": "Azure.ResourceManager.CommonTypes.Resource",
+ "azure.mgmt.durabletask.models.PrivateEndpointConnection": "Microsoft.DurableTask.PrivateEndpointConnection",
+ "azure.mgmt.durabletask.models.PrivateEndpointConnectionProperties": "Azure.ResourceManager.CommonTypes.PrivateEndpointConnectionProperties",
+ "azure.mgmt.durabletask.models.PrivateEndpointConnectionUpdate": "Azure.ResourceManager.PrivateEndpointConnectionUpdate",
+ "azure.mgmt.durabletask.models.PrivateLinkResourceProperties": "Azure.ResourceManager.CommonTypes.PrivateLinkResourceProperties",
+ "azure.mgmt.durabletask.models.PrivateLinkServiceConnectionState": "Azure.ResourceManager.CommonTypes.PrivateLinkServiceConnectionState",
"azure.mgmt.durabletask.models.ProxyResource": "Azure.ResourceManager.CommonTypes.ProxyResource",
"azure.mgmt.durabletask.models.RetentionPolicy": "Microsoft.DurableTask.RetentionPolicy",
"azure.mgmt.durabletask.models.RetentionPolicyDetails": "Microsoft.DurableTask.RetentionPolicyDetails",
"azure.mgmt.durabletask.models.RetentionPolicyProperties": "Microsoft.DurableTask.RetentionPolicyProperties",
"azure.mgmt.durabletask.models.TrackedResource": "Azure.ResourceManager.CommonTypes.TrackedResource",
"azure.mgmt.durabletask.models.Scheduler": "Microsoft.DurableTask.Scheduler",
+ "azure.mgmt.durabletask.models.SchedulerPrivateLinkResource": "Microsoft.DurableTask.SchedulerPrivateLinkResource",
"azure.mgmt.durabletask.models.SchedulerProperties": "Microsoft.DurableTask.SchedulerProperties",
"azure.mgmt.durabletask.models.SchedulerPropertiesUpdate": "Microsoft.DurableTask.SchedulerPropertiesUpdate",
"azure.mgmt.durabletask.models.SchedulerSku": "Microsoft.DurableTask.SchedulerSku",
@@ -27,6 +35,9 @@
"azure.mgmt.durabletask.models.ProvisioningState": "Microsoft.DurableTask.ProvisioningState",
"azure.mgmt.durabletask.models.SchedulerSkuName": "Microsoft.DurableTask.SchedulerSkuName",
"azure.mgmt.durabletask.models.RedundancyState": "Microsoft.DurableTask.RedundancyState",
+ "azure.mgmt.durabletask.models.PublicNetworkAccess": "Microsoft.DurableTask.PublicNetworkAccess",
+ "azure.mgmt.durabletask.models.PrivateEndpointServiceConnectionStatus": "Azure.ResourceManager.CommonTypes.PrivateEndpointServiceConnectionStatus",
+ "azure.mgmt.durabletask.models.PrivateEndpointConnectionProvisioningState": "Azure.ResourceManager.CommonTypes.PrivateEndpointConnectionProvisioningState",
"azure.mgmt.durabletask.models.PurgeableOrchestrationState": "Microsoft.DurableTask.PurgeableOrchestrationState",
"azure.mgmt.durabletask.operations.Operations.list": "Azure.ResourceManager.Operations.list",
"azure.mgmt.durabletask.aio.operations.Operations.list": "Azure.ResourceManager.Operations.list",
@@ -42,6 +53,20 @@
"azure.mgmt.durabletask.aio.operations.SchedulersOperations.list_by_resource_group": "Microsoft.DurableTask.Schedulers.listByResourceGroup",
"azure.mgmt.durabletask.operations.SchedulersOperations.list_by_subscription": "Microsoft.DurableTask.Schedulers.listBySubscription",
"azure.mgmt.durabletask.aio.operations.SchedulersOperations.list_by_subscription": "Microsoft.DurableTask.Schedulers.listBySubscription",
+ "azure.mgmt.durabletask.operations.SchedulersOperations.get_private_link": "Microsoft.DurableTask.Schedulers.getPrivateLink",
+ "azure.mgmt.durabletask.aio.operations.SchedulersOperations.get_private_link": "Microsoft.DurableTask.Schedulers.getPrivateLink",
+ "azure.mgmt.durabletask.operations.SchedulersOperations.list_private_links": "Microsoft.DurableTask.Schedulers.listPrivateLinks",
+ "azure.mgmt.durabletask.aio.operations.SchedulersOperations.list_private_links": "Microsoft.DurableTask.Schedulers.listPrivateLinks",
+ "azure.mgmt.durabletask.operations.SchedulersOperations.get_private_endpoint_connection": "Microsoft.DurableTask.Schedulers.getPrivateEndpointConnection",
+ "azure.mgmt.durabletask.aio.operations.SchedulersOperations.get_private_endpoint_connection": "Microsoft.DurableTask.Schedulers.getPrivateEndpointConnection",
+ "azure.mgmt.durabletask.operations.SchedulersOperations.begin_create_or_update_private_endpoint_connection": "Microsoft.DurableTask.Schedulers.createOrUpdatePrivateEndpointConnection",
+ "azure.mgmt.durabletask.aio.operations.SchedulersOperations.begin_create_or_update_private_endpoint_connection": "Microsoft.DurableTask.Schedulers.createOrUpdatePrivateEndpointConnection",
+ "azure.mgmt.durabletask.operations.SchedulersOperations.begin_update_private_endpoint_connection": "Microsoft.DurableTask.Schedulers.updatePrivateEndpointConnection",
+ "azure.mgmt.durabletask.aio.operations.SchedulersOperations.begin_update_private_endpoint_connection": "Microsoft.DurableTask.Schedulers.updatePrivateEndpointConnection",
+ "azure.mgmt.durabletask.operations.SchedulersOperations.begin_delete_private_endpoint_connection": "Microsoft.DurableTask.Schedulers.deletePrivateEndpointConnection",
+ "azure.mgmt.durabletask.aio.operations.SchedulersOperations.begin_delete_private_endpoint_connection": "Microsoft.DurableTask.Schedulers.deletePrivateEndpointConnection",
+ "azure.mgmt.durabletask.operations.SchedulersOperations.list_private_endpoint_connections": "Microsoft.DurableTask.Schedulers.listPrivateEndpointConnections",
+ "azure.mgmt.durabletask.aio.operations.SchedulersOperations.list_private_endpoint_connections": "Microsoft.DurableTask.Schedulers.listPrivateEndpointConnections",
"azure.mgmt.durabletask.operations.TaskHubsOperations.get": "Microsoft.DurableTask.TaskHubs.get",
"azure.mgmt.durabletask.aio.operations.TaskHubsOperations.get": "Microsoft.DurableTask.TaskHubs.get",
"azure.mgmt.durabletask.operations.TaskHubsOperations.begin_create_or_update": "Microsoft.DurableTask.TaskHubs.createOrUpdate",
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_client.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_client.py
index 148d24cae0e6..0fd9b1d201e6 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_client.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_client.py
@@ -46,8 +46,9 @@ class DurableTaskMgmtClient:
:keyword cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is
None.
:paramtype cloud_setting: ~azure.core.AzureClouds
- :keyword api_version: The API version to use for this operation. Default value is "2025-11-01".
- Note that overriding this default value may result in unsupported behavior.
+ :keyword api_version: The API version to use for this operation. Known values are "2026-02-01"
+ and None. Default value is "2026-02-01". Note that overriding this default value may result in
+ unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
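The client docstring above reflects the default API version moving to "2026-02-01". A minimal sketch, assuming the usual management-client constructor shape, of pinning the version explicitly via the `api_version` keyword:

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.durabletask import DurableTaskMgmtClient

# api_version defaults to "2026-02-01"; it can be pinned explicitly, but overriding
# the default may result in unsupported behavior (per the docstring above).
client = DurableTaskMgmtClient(
    DefaultAzureCredential(),
    subscription_id="<subscription-id>",
    api_version="2026-02-01",
)
```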
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_configuration.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_configuration.py
index 493acfeeff5f..c3cb9d9052ef 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_configuration.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_configuration.py
@@ -33,8 +33,9 @@ class DurableTaskMgmtClientConfiguration: # pylint: disable=too-many-instance-a
:param cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is
None.
:type cloud_setting: ~azure.core.AzureClouds
- :keyword api_version: The API version to use for this operation. Default value is "2025-11-01".
- Note that overriding this default value may result in unsupported behavior.
+ :keyword api_version: The API version to use for this operation. Known values are "2026-02-01"
+ and None. Default value is "2026-02-01". Note that overriding this default value may result in
+ unsupported behavior.
:paramtype api_version: str
"""
@@ -46,7 +47,7 @@ def __init__(
cloud_setting: Optional["AzureClouds"] = None,
**kwargs: Any
) -> None:
- api_version: str = kwargs.pop("api_version", "2025-11-01")
+ api_version: str = kwargs.pop("api_version", "2026-02-01")
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_model_base.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_model_base.py
deleted file mode 100644
index 49d5c7259389..000000000000
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_model_base.py
+++ /dev/null
@@ -1,1232 +0,0 @@
-# pylint: disable=too-many-lines
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) Python Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-# pylint: disable=protected-access, broad-except
-
-import copy
-import calendar
-import decimal
-import functools
-import sys
-import logging
-import base64
-import re
-import typing
-import enum
-import email.utils
-from datetime import datetime, date, time, timedelta, timezone
-from json import JSONEncoder
-import xml.etree.ElementTree as ET
-from collections.abc import MutableMapping
-from typing_extensions import Self
-import isodate
-from azure.core.exceptions import DeserializationError
-from azure.core import CaseInsensitiveEnumMeta
-from azure.core.pipeline import PipelineResponse
-from azure.core.serialization import _Null
-
-_LOGGER = logging.getLogger(__name__)
-
-__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"]
-
-TZ_UTC = timezone.utc
-_T = typing.TypeVar("_T")
-
-
-def _timedelta_as_isostr(td: timedelta) -> str:
- """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 'P4DT12H30M05S'
-
- Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython
-
- :param timedelta td: The timedelta to convert
- :rtype: str
- :return: ISO8601 version of this timedelta
- """
-
- # Split seconds to larger units
- seconds = td.total_seconds()
- minutes, seconds = divmod(seconds, 60)
- hours, minutes = divmod(minutes, 60)
- days, hours = divmod(hours, 24)
-
- days, hours, minutes = list(map(int, (days, hours, minutes)))
- seconds = round(seconds, 6)
-
- # Build date
- date_str = ""
- if days:
- date_str = "%sD" % days
-
- if hours or minutes or seconds:
- # Build time
- time_str = "T"
-
- # Hours
- bigger_exists = date_str or hours
- if bigger_exists:
- time_str += "{:02}H".format(hours)
-
- # Minutes
- bigger_exists = bigger_exists or minutes
- if bigger_exists:
- time_str += "{:02}M".format(minutes)
-
- # Seconds
- try:
- if seconds.is_integer():
- seconds_string = "{:02}".format(int(seconds))
- else:
- # 9 chars long w/ leading 0, 6 digits after decimal
- seconds_string = "%09.6f" % seconds
- # Remove trailing zeros
- seconds_string = seconds_string.rstrip("0")
- except AttributeError: # int.is_integer() raises
- seconds_string = "{:02}".format(seconds)
-
- time_str += "{}S".format(seconds_string)
- else:
- time_str = ""
-
- return "P" + date_str + time_str
-
-
-def _serialize_bytes(o, format: typing.Optional[str] = None) -> str:
- encoded = base64.b64encode(o).decode()
- if format == "base64url":
- return encoded.strip("=").replace("+", "-").replace("/", "_")
- return encoded
-
-
-def _serialize_datetime(o, format: typing.Optional[str] = None):
- if hasattr(o, "year") and hasattr(o, "hour"):
- if format == "rfc7231":
- return email.utils.format_datetime(o, usegmt=True)
- if format == "unix-timestamp":
- return int(calendar.timegm(o.utctimetuple()))
-
-        # astimezone() fails for naive times in Python 2.7, so make sure o is aware (tzinfo is set)
- if not o.tzinfo:
- iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat()
- else:
- iso_formatted = o.astimezone(TZ_UTC).isoformat()
- # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt)
- return iso_formatted.replace("+00:00", "Z")
- # Next try datetime.date or datetime.time
- return o.isoformat()
-
-
-def _is_readonly(p):
- try:
- return p._visibility == ["read"]
- except AttributeError:
- return False
-
-
-class SdkJSONEncoder(JSONEncoder):
- """A JSON encoder that's capable of serializing datetime objects and bytes."""
-
- def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs):
- super().__init__(*args, **kwargs)
- self.exclude_readonly = exclude_readonly
- self.format = format
-
- def default(self, o): # pylint: disable=too-many-return-statements
- if _is_model(o):
- if self.exclude_readonly:
- readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
- return {k: v for k, v in o.items() if k not in readonly_props}
- return dict(o.items())
- try:
- return super(SdkJSONEncoder, self).default(o)
- except TypeError:
- if isinstance(o, _Null):
- return None
- if isinstance(o, decimal.Decimal):
- return float(o)
- if isinstance(o, (bytes, bytearray)):
- return _serialize_bytes(o, self.format)
- try:
- # First try datetime.datetime
- return _serialize_datetime(o, self.format)
- except AttributeError:
- pass
- # Last, try datetime.timedelta
- try:
- return _timedelta_as_isostr(o)
- except AttributeError:
- # This will be raised when it hits value.total_seconds in the method above
- pass
- return super(SdkJSONEncoder, self).default(o)
-
-
-_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
-_VALID_RFC7231 = re.compile(
- r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s"
- r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT"
-)
-
-
-def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
- """Deserialize ISO-8601 formatted string into Datetime object.
-
- :param str attr: response string to be deserialized.
- :rtype: ~datetime.datetime
- :returns: The datetime object from that input
- """
- if isinstance(attr, datetime):
- # i'm already deserialized
- return attr
- attr = attr.upper()
- match = _VALID_DATE.match(attr)
- if not match:
- raise ValueError("Invalid datetime string: " + attr)
-
- check_decimal = attr.split(".")
- if len(check_decimal) > 1:
- decimal_str = ""
- for digit in check_decimal[1]:
- if digit.isdigit():
- decimal_str += digit
- else:
- break
- if len(decimal_str) > 6:
- attr = attr.replace(decimal_str, decimal_str[0:6])
-
- date_obj = isodate.parse_datetime(attr)
- test_utc = date_obj.utctimetuple()
- if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
- raise OverflowError("Hit max or min date")
- return date_obj
-
-
-def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime:
- """Deserialize RFC7231 formatted string into Datetime object.
-
- :param str attr: response string to be deserialized.
- :rtype: ~datetime.datetime
- :returns: The datetime object from that input
- """
- if isinstance(attr, datetime):
- # i'm already deserialized
- return attr
- match = _VALID_RFC7231.match(attr)
- if not match:
- raise ValueError("Invalid datetime string: " + attr)
-
- return email.utils.parsedate_to_datetime(attr)
-
-
-def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime:
- """Deserialize unix timestamp into Datetime object.
-
- :param str attr: response string to be deserialized.
- :rtype: ~datetime.datetime
- :returns: The datetime object from that input
- """
- if isinstance(attr, datetime):
- # i'm already deserialized
- return attr
- return datetime.fromtimestamp(attr, TZ_UTC)
-
-
-def _deserialize_date(attr: typing.Union[str, date]) -> date:
- """Deserialize ISO-8601 formatted string into Date object.
- :param str attr: response string to be deserialized.
- :rtype: date
- :returns: The date object from that input
- """
- # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
- if isinstance(attr, date):
- return attr
- return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore
-
-
-def _deserialize_time(attr: typing.Union[str, time]) -> time:
- """Deserialize ISO-8601 formatted string into time object.
-
- :param str attr: response string to be deserialized.
- :rtype: datetime.time
- :returns: The time object from that input
- """
- if isinstance(attr, time):
- return attr
- return isodate.parse_time(attr)
-
-
-def _deserialize_bytes(attr):
- if isinstance(attr, (bytes, bytearray)):
- return attr
- return bytes(base64.b64decode(attr))
-
-
-def _deserialize_bytes_base64(attr):
- if isinstance(attr, (bytes, bytearray)):
- return attr
- padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore
- attr = attr + padding # type: ignore
- encoded = attr.replace("-", "+").replace("_", "/")
- return bytes(base64.b64decode(encoded))
-
-
-def _deserialize_duration(attr):
- if isinstance(attr, timedelta):
- return attr
- return isodate.parse_duration(attr)
-
-
-def _deserialize_decimal(attr):
- if isinstance(attr, decimal.Decimal):
- return attr
- return decimal.Decimal(str(attr))
-
-
-def _deserialize_int_as_str(attr):
- if isinstance(attr, int):
- return attr
- return int(attr)
-
-
-_DESERIALIZE_MAPPING = {
- datetime: _deserialize_datetime,
- date: _deserialize_date,
- time: _deserialize_time,
- bytes: _deserialize_bytes,
- bytearray: _deserialize_bytes,
- timedelta: _deserialize_duration,
- typing.Any: lambda x: x,
- decimal.Decimal: _deserialize_decimal,
-}
-
-_DESERIALIZE_MAPPING_WITHFORMAT = {
- "rfc3339": _deserialize_datetime,
- "rfc7231": _deserialize_datetime_rfc7231,
- "unix-timestamp": _deserialize_datetime_unix_timestamp,
- "base64": _deserialize_bytes,
- "base64url": _deserialize_bytes_base64,
-}
-
-
-def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None):
- if annotation is int and rf and rf._format == "str":
- return _deserialize_int_as_str
- if rf and rf._format:
- return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format)
- return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore
-
-
-def _get_type_alias_type(module_name: str, alias_name: str):
- types = {
- k: v
- for k, v in sys.modules[module_name].__dict__.items()
- if isinstance(v, typing._GenericAlias) # type: ignore
- }
- if alias_name not in types:
- return alias_name
- return types[alias_name]
-
-
-def _get_model(module_name: str, model_name: str):
- models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)}
- module_end = module_name.rsplit(".", 1)[0]
- models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)})
- if isinstance(model_name, str):
- model_name = model_name.split(".")[-1]
- if model_name not in models:
- return model_name
- return models[model_name]
-
-
-_UNSET = object()
-
-
-class _MyMutableMapping(MutableMapping[str, typing.Any]):
- def __init__(self, data: typing.Dict[str, typing.Any]) -> None:
- self._data = data
-
- def __contains__(self, key: typing.Any) -> bool:
- return key in self._data
-
- def __getitem__(self, key: str) -> typing.Any:
- return self._data.__getitem__(key)
-
- def __setitem__(self, key: str, value: typing.Any) -> None:
- self._data.__setitem__(key, value)
-
- def __delitem__(self, key: str) -> None:
- self._data.__delitem__(key)
-
- def __iter__(self) -> typing.Iterator[typing.Any]:
- return self._data.__iter__()
-
- def __len__(self) -> int:
- return self._data.__len__()
-
- def __ne__(self, other: typing.Any) -> bool:
- return not self.__eq__(other)
-
- def keys(self) -> typing.KeysView[str]:
- """
- :returns: a set-like object providing a view on D's keys
- :rtype: ~typing.KeysView
- """
- return self._data.keys()
-
- def values(self) -> typing.ValuesView[typing.Any]:
- """
- :returns: an object providing a view on D's values
- :rtype: ~typing.ValuesView
- """
- return self._data.values()
-
- def items(self) -> typing.ItemsView[str, typing.Any]:
- """
- :returns: set-like object providing a view on D's items
- :rtype: ~typing.ItemsView
- """
- return self._data.items()
-
- def get(self, key: str, default: typing.Any = None) -> typing.Any:
- """
- Get the value for key if key is in the dictionary, else default.
- :param str key: The key to look up.
- :param any default: The value to return if key is not in the dictionary. Defaults to None
- :returns: D[k] if k in D, else d.
- :rtype: any
- """
- try:
- return self[key]
- except KeyError:
- return default
-
- @typing.overload
- def pop(self, key: str) -> typing.Any: ... # pylint: disable=arguments-differ
-
- @typing.overload
- def pop(self, key: str, default: _T) -> _T: ... # pylint: disable=signature-differs
-
- @typing.overload
- def pop(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs
-
- def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
- """
- Removes specified key and return the corresponding value.
- :param str key: The key to pop.
- :param any default: The value to return if key is not in the dictionary
- :returns: The value corresponding to the key.
- :rtype: any
- :raises KeyError: If key is not found and default is not given.
- """
- if default is _UNSET:
- return self._data.pop(key)
- return self._data.pop(key, default)
-
- def popitem(self) -> typing.Tuple[str, typing.Any]:
- """
- Removes and returns some (key, value) pair
- :returns: The (key, value) pair.
- :rtype: tuple
- :raises KeyError: if D is empty.
- """
- return self._data.popitem()
-
- def clear(self) -> None:
- """
- Remove all items from D.
- """
- self._data.clear()
-
- def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=arguments-differ
- """
- Updates D from mapping/iterable E and F.
- :param any args: Either a mapping object or an iterable of key-value pairs.
- """
- self._data.update(*args, **kwargs)
-
- @typing.overload
- def setdefault(self, key: str, default: None = None) -> None: ...
-
- @typing.overload
- def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs
-
- def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
- """
- Same as calling D.get(k, d), and setting D[k]=d if k not found
- :param str key: The key to look up.
- :param any default: The value to set if key is not in the dictionary
- :returns: D[k] if k in D, else d.
- :rtype: any
- """
- if default is _UNSET:
- return self._data.setdefault(key)
- return self._data.setdefault(key, default)
-
- def __eq__(self, other: typing.Any) -> bool:
- try:
- other_model = self.__class__(other)
- except Exception:
- return False
- return self._data == other_model._data
-
- def __repr__(self) -> str:
- return str(self._data)
-
-
-def _is_model(obj: typing.Any) -> bool:
- return getattr(obj, "_is_model", False)
-
-
-def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements
- if isinstance(o, list):
- return [_serialize(x, format) for x in o]
- if isinstance(o, dict):
- return {k: _serialize(v, format) for k, v in o.items()}
- if isinstance(o, set):
- return {_serialize(x, format) for x in o}
- if isinstance(o, tuple):
- return tuple(_serialize(x, format) for x in o)
- if isinstance(o, (bytes, bytearray)):
- return _serialize_bytes(o, format)
- if isinstance(o, decimal.Decimal):
- return float(o)
- if isinstance(o, enum.Enum):
- return o.value
- if isinstance(o, int):
- if format == "str":
- return str(o)
- return o
- try:
- # First try datetime.datetime
- return _serialize_datetime(o, format)
- except AttributeError:
- pass
- # Last, try datetime.timedelta
- try:
- return _timedelta_as_isostr(o)
- except AttributeError:
- # This will be raised when it hits value.total_seconds in the method above
- pass
- return o
-
-
-def _get_rest_field(
- attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str
-) -> typing.Optional["_RestField"]:
- try:
- return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name)
- except StopIteration:
- return None
-
-
-def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any:
- if not rf:
- return _serialize(value, None)
- if rf._is_multipart_file_input:
- return value
- if rf._is_model:
- return _deserialize(rf._type, value)
- if isinstance(value, ET.Element):
- value = _deserialize(rf._type, value)
- return _serialize(value, rf._format)
-
-
-class Model(_MyMutableMapping):
- _is_model = True
- # label whether current class's _attr_to_rest_field has been calculated
- # could not see _attr_to_rest_field directly because subclass inherits it from parent class
- _calculated: typing.Set[str] = set()
-
- def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None:
- class_name = self.__class__.__name__
- if len(args) > 1:
- raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given")
- dict_to_pass = {
- rest_field._rest_name: rest_field._default
- for rest_field in self._attr_to_rest_field.values()
- if rest_field._default is not _UNSET
- }
- if args: # pylint: disable=too-many-nested-blocks
- if isinstance(args[0], ET.Element):
- existed_attr_keys = []
- model_meta = getattr(self, "_xml", {})
-
- for rf in self._attr_to_rest_field.values():
- prop_meta = getattr(rf, "_xml", {})
- xml_name = prop_meta.get("name", rf._rest_name)
- xml_ns = prop_meta.get("ns", model_meta.get("ns", None))
- if xml_ns:
- xml_name = "{" + xml_ns + "}" + xml_name
-
- # attribute
- if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None:
- existed_attr_keys.append(xml_name)
- dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name))
- continue
-
- # unwrapped element is array
- if prop_meta.get("unwrapped", False):
- # unwrapped array could either use prop items meta/prop meta
- if prop_meta.get("itemsName"):
- xml_name = prop_meta.get("itemsName")
- xml_ns = prop_meta.get("itemNs")
- if xml_ns:
- xml_name = "{" + xml_ns + "}" + xml_name
- items = args[0].findall(xml_name) # pyright: ignore
- if len(items) > 0:
- existed_attr_keys.append(xml_name)
- dict_to_pass[rf._rest_name] = _deserialize(rf._type, items)
- continue
-
- # text element is primitive type
- if prop_meta.get("text", False):
- if args[0].text is not None:
- dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text)
- continue
-
- # wrapped element could be normal property or array, it should only have one element
- item = args[0].find(xml_name)
- if item is not None:
- existed_attr_keys.append(xml_name)
- dict_to_pass[rf._rest_name] = _deserialize(rf._type, item)
-
- # rest thing is additional properties
- for e in args[0]:
- if e.tag not in existed_attr_keys:
- dict_to_pass[e.tag] = _convert_element(e)
- else:
- dict_to_pass.update(
- {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()}
- )
- else:
- non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field]
- if non_attr_kwargs:
- # actual type errors only throw the first wrong keyword arg they see, so following that.
- raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'")
- dict_to_pass.update(
- {
- self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v)
- for k, v in kwargs.items()
- if v is not None
- }
- )
- super().__init__(dict_to_pass)
-
- def copy(self) -> "Model":
- return Model(self.__dict__)
-
- def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self:
- if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated:
- # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping',
- # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object'
- mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order
- attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property
- k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type")
- }
- annotations = {
- k: v
- for mro_class in mros
- if hasattr(mro_class, "__annotations__")
- for k, v in mro_class.__annotations__.items()
- }
- for attr, rf in attr_to_rest_field.items():
- rf._module = cls.__module__
- if not rf._type:
- rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None))
- if not rf._rest_name_input:
- rf._rest_name_input = attr
- cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items())
- cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}")
-
- return super().__new__(cls)
-
- def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None:
- for base in cls.__bases__:
- if hasattr(base, "__mapping__"):
- base.__mapping__[discriminator or cls.__name__] = cls # type: ignore
-
- @classmethod
- def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]:
- for v in cls.__dict__.values():
- if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators:
- return v
- return None
-
- @classmethod
- def _deserialize(cls, data, exist_discriminators):
- if not hasattr(cls, "__mapping__"):
- return cls(data)
- discriminator = cls._get_discriminator(exist_discriminators)
- if discriminator is None:
- return cls(data)
- exist_discriminators.append(discriminator._rest_name)
- if isinstance(data, ET.Element):
- model_meta = getattr(cls, "_xml", {})
- prop_meta = getattr(discriminator, "_xml", {})
- xml_name = prop_meta.get("name", discriminator._rest_name)
- xml_ns = prop_meta.get("ns", model_meta.get("ns", None))
- if xml_ns:
- xml_name = "{" + xml_ns + "}" + xml_name
-
- if data.get(xml_name) is not None:
- discriminator_value = data.get(xml_name)
- else:
- discriminator_value = data.find(xml_name).text # pyright: ignore
- else:
- discriminator_value = data.get(discriminator._rest_name)
- mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member
- return mapped_cls._deserialize(data, exist_discriminators)
-
- def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]:
- """Return a dict that can be turned into json using json.dump.
-
- :keyword bool exclude_readonly: Whether to remove the readonly properties.
- :returns: A dict JSON compatible object
- :rtype: dict
- """
-
- result = {}
- readonly_props = []
- if exclude_readonly:
- readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)]
- for k, v in self.items():
- if exclude_readonly and k in readonly_props: # pyright: ignore
- continue
- is_multipart_file_input = False
- try:
- is_multipart_file_input = next(
- rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k
- )._is_multipart_file_input
- except StopIteration:
- pass
- result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly)
- return result
-
- @staticmethod
- def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any:
- if v is None or isinstance(v, _Null):
- return None
- if isinstance(v, (list, tuple, set)):
- return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v)
- if isinstance(v, dict):
- return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()}
- return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v
-
-
-def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj):
- if _is_model(obj):
- return obj
- return _deserialize(model_deserializer, obj)
-
-
-def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj):
- if obj is None:
- return obj
- return _deserialize_with_callable(if_obj_deserializer, obj)
-
-
-def _deserialize_with_union(deserializers, obj):
- for deserializer in deserializers:
- try:
- return _deserialize(deserializer, obj)
- except DeserializationError:
- pass
- raise DeserializationError()
-
-
-def _deserialize_dict(
- value_deserializer: typing.Optional[typing.Callable],
- module: typing.Optional[str],
- obj: typing.Dict[typing.Any, typing.Any],
-):
- if obj is None:
- return obj
- if isinstance(obj, ET.Element):
- obj = {child.tag: child for child in obj}
- return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()}
-
-
-def _deserialize_multiple_sequence(
- entry_deserializers: typing.List[typing.Optional[typing.Callable]],
- module: typing.Optional[str],
- obj,
-):
- if obj is None:
- return obj
- return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers))
-
-
-def _deserialize_sequence(
- deserializer: typing.Optional[typing.Callable],
- module: typing.Optional[str],
- obj,
-):
- if obj is None:
- return obj
- if isinstance(obj, ET.Element):
- obj = list(obj)
- return type(obj)(_deserialize(deserializer, entry, module) for entry in obj)
-
-
-def _sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.Any]:
- return sorted(
- types,
- key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"),
- )
-
-
-def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches
- annotation: typing.Any,
- module: typing.Optional[str],
- rf: typing.Optional["_RestField"] = None,
-) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
- if not annotation:
- return None
-
- # is it a type alias?
- if isinstance(annotation, str):
- if module is not None:
- annotation = _get_type_alias_type(module, annotation)
-
- # is it a forward ref / in quotes?
- if isinstance(annotation, (str, typing.ForwardRef)):
- try:
- model_name = annotation.__forward_arg__ # type: ignore
- except AttributeError:
- model_name = annotation
- if module is not None:
- annotation = _get_model(module, model_name) # type: ignore
-
- try:
- if module and _is_model(annotation):
- if rf:
- rf._is_model = True
-
- return functools.partial(_deserialize_model, annotation) # pyright: ignore
- except Exception:
- pass
-
- # is it a literal?
- try:
- if annotation.__origin__ is typing.Literal: # pyright: ignore
- return None
- except AttributeError:
- pass
-
- # is it optional?
- try:
- if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore
- if len(annotation.__args__) <= 2: # pyright: ignore
- if_obj_deserializer = _get_deserialize_callable_from_annotation(
- next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore
- )
-
- return functools.partial(_deserialize_with_optional, if_obj_deserializer)
- # the type is Optional[Union[...]], we need to remove the None type from the Union
- annotation_copy = copy.copy(annotation)
- annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore
- return _get_deserialize_callable_from_annotation(annotation_copy, module, rf)
- except AttributeError:
- pass
-
- # is it union?
- if getattr(annotation, "__origin__", None) is typing.Union:
-        # initial ordering: make `string` the last deserialization option, because it is often the most generic
- deserializers = [
- _get_deserialize_callable_from_annotation(arg, module, rf)
- for arg in _sorted_annotations(annotation.__args__) # pyright: ignore
- ]
-
- return functools.partial(_deserialize_with_union, deserializers)
-
- try:
- if annotation._name == "Dict": # pyright: ignore
- value_deserializer = _get_deserialize_callable_from_annotation(
- annotation.__args__[1], module, rf # pyright: ignore
- )
-
- return functools.partial(
- _deserialize_dict,
- value_deserializer,
- module,
- )
- except (AttributeError, IndexError):
- pass
- try:
- if annotation._name in ["List", "Set", "Tuple", "Sequence"]: # pyright: ignore
- if len(annotation.__args__) > 1: # pyright: ignore
- entry_deserializers = [
- _get_deserialize_callable_from_annotation(dt, module, rf)
- for dt in annotation.__args__ # pyright: ignore
- ]
- return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module)
- deserializer = _get_deserialize_callable_from_annotation(
- annotation.__args__[0], module, rf # pyright: ignore
- )
-
- return functools.partial(_deserialize_sequence, deserializer, module)
- except (TypeError, IndexError, AttributeError, SyntaxError):
- pass
-
- def _deserialize_default(
- deserializer,
- obj,
- ):
- if obj is None:
- return obj
- try:
- return _deserialize_with_callable(deserializer, obj)
- except Exception:
- pass
- return obj
-
- if get_deserializer(annotation, rf):
- return functools.partial(_deserialize_default, get_deserializer(annotation, rf))
-
- return functools.partial(_deserialize_default, annotation)
-
-
-def _deserialize_with_callable(
- deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]],
- value: typing.Any,
-): # pylint: disable=too-many-return-statements
- try:
- if value is None or isinstance(value, _Null):
- return None
- if isinstance(value, ET.Element):
- if deserializer is str:
- return value.text or ""
- if deserializer is int:
- return int(value.text) if value.text else None
- if deserializer is float:
- return float(value.text) if value.text else None
- if deserializer is bool:
- return value.text == "true" if value.text else None
- if deserializer is None:
- return value
- if deserializer in [int, float, bool]:
- return deserializer(value)
- if isinstance(deserializer, CaseInsensitiveEnumMeta):
- try:
- return deserializer(value)
- except ValueError:
- # for unknown value, return raw value
- return value
- if isinstance(deserializer, type) and issubclass(deserializer, Model):
- return deserializer._deserialize(value, [])
- return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value)
- except Exception as e:
- raise DeserializationError() from e
-
-
-def _deserialize(
- deserializer: typing.Any,
- value: typing.Any,
- module: typing.Optional[str] = None,
- rf: typing.Optional["_RestField"] = None,
- format: typing.Optional[str] = None,
-) -> typing.Any:
- if isinstance(value, PipelineResponse):
- value = value.http_response.json()
- if rf is None and format:
- rf = _RestField(format=format)
- if not isinstance(deserializer, functools.partial):
- deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf)
- return _deserialize_with_callable(deserializer, value)
-
-
-def _failsafe_deserialize(
- deserializer: typing.Any,
- value: typing.Any,
- module: typing.Optional[str] = None,
- rf: typing.Optional["_RestField"] = None,
- format: typing.Optional[str] = None,
-) -> typing.Any:
- try:
- return _deserialize(deserializer, value, module, rf, format)
- except DeserializationError:
- _LOGGER.warning(
- "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
- )
- return None
-
-
-def _failsafe_deserialize_xml(
- deserializer: typing.Any,
- value: typing.Any,
-) -> typing.Any:
- try:
- return _deserialize_xml(deserializer, value)
- except DeserializationError:
- _LOGGER.warning(
- "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
- )
- return None
-
-
-class _RestField:
- def __init__(
- self,
- *,
- name: typing.Optional[str] = None,
- type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin
- is_discriminator: bool = False,
- visibility: typing.Optional[typing.List[str]] = None,
- default: typing.Any = _UNSET,
- format: typing.Optional[str] = None,
- is_multipart_file_input: bool = False,
- xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
- ):
- self._type = type
- self._rest_name_input = name
- self._module: typing.Optional[str] = None
- self._is_discriminator = is_discriminator
- self._visibility = visibility
- self._is_model = False
- self._default = default
- self._format = format
- self._is_multipart_file_input = is_multipart_file_input
- self._xml = xml if xml is not None else {}
-
- @property
- def _class_type(self) -> typing.Any:
- return getattr(self._type, "args", [None])[0]
-
- @property
- def _rest_name(self) -> str:
- if self._rest_name_input is None:
- raise ValueError("Rest name was never set")
- return self._rest_name_input
-
- def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin
- # by this point, type and rest_name will have a value bc we default
- # them in __new__ of the Model class
- item = obj.get(self._rest_name)
- if item is None:
- return item
- if self._is_model:
- return item
- return _deserialize(self._type, _serialize(item, self._format), rf=self)
-
- def __set__(self, obj: Model, value) -> None:
- if value is None:
- # we want to wipe out entries if users set attr to None
- try:
- obj.__delitem__(self._rest_name)
- except KeyError:
- pass
- return
- if self._is_model:
- if not _is_model(value):
- value = _deserialize(self._type, value)
- obj.__setitem__(self._rest_name, value)
- return
- obj.__setitem__(self._rest_name, _serialize(value, self._format))
-
- def _get_deserialize_callable_from_annotation(
- self, annotation: typing.Any
- ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
- return _get_deserialize_callable_from_annotation(annotation, self._module, self)
-
-
-def rest_field(
- *,
- name: typing.Optional[str] = None,
- type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin
- visibility: typing.Optional[typing.List[str]] = None,
- default: typing.Any = _UNSET,
- format: typing.Optional[str] = None,
- is_multipart_file_input: bool = False,
- xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
-) -> typing.Any:
- return _RestField(
- name=name,
- type=type,
- visibility=visibility,
- default=default,
- format=format,
- is_multipart_file_input=is_multipart_file_input,
- xml=xml,
- )
-
-
-def rest_discriminator(
- *,
- name: typing.Optional[str] = None,
- type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin
- visibility: typing.Optional[typing.List[str]] = None,
- xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
-) -> typing.Any:
- return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml)
-
-
-def serialize_xml(model: Model, exclude_readonly: bool = False) -> str:
- """Serialize a model to XML.
-
- :param Model model: The model to serialize.
- :param bool exclude_readonly: Whether to exclude readonly properties.
- :returns: The XML representation of the model.
- :rtype: str
- """
- return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore
-
-
-def _get_element(
- o: typing.Any,
- exclude_readonly: bool = False,
- parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None,
- wrapped_element: typing.Optional[ET.Element] = None,
-) -> typing.Union[ET.Element, typing.List[ET.Element]]:
- if _is_model(o):
- model_meta = getattr(o, "_xml", {})
-
- # if prop is a model, then use the prop element directly, else generate a wrapper of model
- if wrapped_element is None:
- wrapped_element = _create_xml_element(
- model_meta.get("name", o.__class__.__name__),
- model_meta.get("prefix"),
- model_meta.get("ns"),
- )
-
- readonly_props = []
- if exclude_readonly:
- readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
-
- for k, v in o.items():
- # do not serialize readonly properties
- if exclude_readonly and k in readonly_props:
- continue
-
- prop_rest_field = _get_rest_field(o._attr_to_rest_field, k)
- if prop_rest_field:
- prop_meta = getattr(prop_rest_field, "_xml").copy()
- # use the wire name as xml name if no specific name is set
- if prop_meta.get("name") is None:
- prop_meta["name"] = k
- else:
- # additional properties will not have rest field, use the wire name as xml name
- prop_meta = {"name": k}
-
- # if no ns for prop, use model's
- if prop_meta.get("ns") is None and model_meta.get("ns"):
- prop_meta["ns"] = model_meta.get("ns")
- prop_meta["prefix"] = model_meta.get("prefix")
-
- if prop_meta.get("unwrapped", False):
- # unwrapped could only set on array
- wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta))
- elif prop_meta.get("text", False):
- # text could only set on primitive type
- wrapped_element.text = _get_primitive_type_value(v)
- elif prop_meta.get("attribute", False):
- xml_name = prop_meta.get("name", k)
- if prop_meta.get("ns"):
- ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore
- xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore
- # attribute should be primitive type
- wrapped_element.set(xml_name, _get_primitive_type_value(v))
- else:
- # other wrapped prop element
- wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta))
- return wrapped_element
- if isinstance(o, list):
- return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore
- if isinstance(o, dict):
- result = []
- for k, v in o.items():
- result.append(
- _get_wrapped_element(
- v,
- exclude_readonly,
- {
- "name": k,
- "ns": parent_meta.get("ns") if parent_meta else None,
- "prefix": parent_meta.get("prefix") if parent_meta else None,
- },
- )
- )
- return result
-
- # primitive case need to create element based on parent_meta
- if parent_meta:
- return _get_wrapped_element(
- o,
- exclude_readonly,
- {
- "name": parent_meta.get("itemsName", parent_meta.get("name")),
- "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")),
- "ns": parent_meta.get("itemsNs", parent_meta.get("ns")),
- },
- )
-
- raise ValueError("Could not serialize value into xml: " + o)
-
-
-def _get_wrapped_element(
- v: typing.Any,
- exclude_readonly: bool,
- meta: typing.Optional[typing.Dict[str, typing.Any]],
-) -> ET.Element:
- wrapped_element = _create_xml_element(
- meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None
- )
- if isinstance(v, (dict, list)):
- wrapped_element.extend(_get_element(v, exclude_readonly, meta))
- elif _is_model(v):
- _get_element(v, exclude_readonly, meta, wrapped_element)
- else:
- wrapped_element.text = _get_primitive_type_value(v)
- return wrapped_element
-
-
-def _get_primitive_type_value(v) -> str:
- if v is True:
- return "true"
- if v is False:
- return "false"
- if isinstance(v, _Null):
- return ""
- return str(v)
-
-
-def _create_xml_element(tag, prefix=None, ns=None):
- if prefix and ns:
- ET.register_namespace(prefix, ns)
- if ns:
- return ET.Element("{" + ns + "}" + tag)
- return ET.Element(tag)
-
-
-def _deserialize_xml(
- deserializer: typing.Any,
- value: str,
-) -> typing.Any:
- element = ET.fromstring(value) # nosec
- return _deserialize(deserializer, element)
-
-
-def _convert_element(e: ET.Element):
- # dict case
- if len(e.attrib) > 0 or len({child.tag for child in e}) > 1:
- dict_result: typing.Dict[str, typing.Any] = {}
- for child in e:
- if dict_result.get(child.tag) is not None:
- if isinstance(dict_result[child.tag], list):
- dict_result[child.tag].append(_convert_element(child))
- else:
- dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)]
- else:
- dict_result[child.tag] = _convert_element(child)
- dict_result.update(e.attrib)
- return dict_result
- # array case
- if len(e) > 0:
- array_result: typing.List[typing.Any] = []
- for child in e:
- array_result.append(_convert_element(child))
- return array_result
- # primitive case
- return e.text
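For context on the removal above: `_model_base.py` provided the hybrid attribute/dict model machinery (`Model` as a `MutableMapping`, `rest_field` descriptors, `SdkJSONEncoder`, `as_dict`). The sketch below is purely illustrative of the behavior that module implemented; it only runs against a package version that still ships `_model_base.py`, and the `Widget` model is a made-up example.

```python
# Illustrative only: exercises the deleted module's public surface (Model, rest_field,
# SdkJSONEncoder) to show the dict/attribute hybrid behavior it provided.
import json
from typing import Optional

from azure.mgmt.durabletask._model_base import Model, SdkJSONEncoder, rest_field


class Widget(Model):
    name: Optional[str] = rest_field()
    provisioning_state: Optional[str] = rest_field(name="provisioningState", visibility=["read"])


w = Widget({"name": "w1", "provisioningState": "Succeeded"})
assert w.name == "w1"                         # attribute access via the rest_field descriptor
assert w["provisioningState"] == "Succeeded"  # dict-style access over wire names (MutableMapping)
print(w.as_dict(exclude_readonly=True))       # {'name': 'w1'} -- read-only properties dropped
print(json.dumps(w, cls=SdkJSONEncoder, exclude_readonly=True))
```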
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_patch.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_patch.py
index 8bcb627aa475..87676c65a8f0 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_patch.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_patch.py
@@ -7,9 +7,9 @@
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
-from typing import List
-__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+__all__: list[str] = [] # Add all objects you want publicly available to users at this package level
def patch_sdk():
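`_patch.py` is the supported hook for hand-written customizations layered on the generated code (see the quickstart link in the module docstring). Below is a hypothetical sketch of such a customization; the helper name and the assumption that `SchedulersOperations.get` accepts `(resource_group_name, scheduler_name)` positionally are illustrative only.

```python
# Hypothetical _patch.py customization; names and call shapes are illustrative.
from typing import Any

from azure.core.exceptions import ResourceNotFoundError

from ._client import DurableTaskMgmtClient


class DurableTaskMgmtClientWithHelpers(DurableTaskMgmtClient):
    """Hand-written convenience layer on top of the generated client."""

    def scheduler_exists(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> bool:
        """Return True if the scheduler resource exists, False on a 404."""
        try:
            self.schedulers.get(resource_group_name, scheduler_name, **kwargs)
            return True
        except ResourceNotFoundError:
            return False


__all__: list[str] = ["DurableTaskMgmtClientWithHelpers"]  # exported from the package namespace


def patch_sdk():
    """Customization hook run by the generated __init__; left as a no-op here."""
```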
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_serialization.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_serialization.py
deleted file mode 100644
index eb86ea23c965..000000000000
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_serialization.py
+++ /dev/null
@@ -1,2032 +0,0 @@
-# pylint: disable=line-too-long,useless-suppression,too-many-lines
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) Python Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-# pyright: reportUnnecessaryTypeIgnoreComment=false
-
-from base64 import b64decode, b64encode
-import calendar
-import datetime
-import decimal
-import email
-from enum import Enum
-import json
-import logging
-import re
-import sys
-import codecs
-from typing import (
- Dict,
- Any,
- cast,
- Optional,
- Union,
- AnyStr,
- IO,
- Mapping,
- Callable,
- MutableMapping,
- List,
-)
-
-try:
- from urllib import quote # type: ignore
-except ImportError:
- from urllib.parse import quote
-import xml.etree.ElementTree as ET
-
-import isodate # type: ignore
-from typing_extensions import Self
-
-from azure.core.exceptions import DeserializationError, SerializationError
-from azure.core.serialization import NULL as CoreNull
-
-_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
-
-JSON = MutableMapping[str, Any]
-
-
-class RawDeserializer:
-
- # Accept "text" because we're open minded people...
- JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")
-
- # Name used in context
- CONTEXT_NAME = "deserialized_data"
-
- @classmethod
- def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any:
- """Decode data according to content-type.
-
-        Accept a stream of data as well, but it will be loaded into memory at once for now.
-
- If no content-type, will return the string version (not bytes, not stream)
-
- :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
- :type data: str or bytes or IO
- :param str content_type: The content type.
- :return: The deserialized data.
- :rtype: object
- """
- if hasattr(data, "read"):
- # Assume a stream
- data = cast(IO, data).read()
-
- if isinstance(data, bytes):
- data_as_str = data.decode(encoding="utf-8-sig")
- else:
- # Explain to mypy the correct type.
- data_as_str = cast(str, data)
-
- # Remove Byte Order Mark if present in string
- data_as_str = data_as_str.lstrip(_BOM)
-
- if content_type is None:
- return data
-
- if cls.JSON_REGEXP.match(content_type):
- try:
- return json.loads(data_as_str)
- except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err) from err
- elif "xml" in (content_type or []):
- try:
-
- try:
- if isinstance(data, unicode): # type: ignore
- # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string
- data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore
- except NameError:
- pass
-
- return ET.fromstring(data_as_str) # nosec
- except ET.ParseError as err:
- # It might be because the server has an issue, and returned JSON with
- # content-type XML....
- # So let's try a JSON load, and if it's still broken
- # let's flow the initial exception
- def _json_attemp(data):
- try:
- return True, json.loads(data)
- except ValueError:
- return False, None # Don't care about this one
-
- success, json_result = _json_attemp(data)
- if success:
- return json_result
- # If i'm here, it's not JSON, it's not XML, let's scream
- # and raise the last context in this block (the XML exception)
- # The function hack is because Py2.7 messes up with exception
- # context otherwise.
- _LOGGER.critical("Wasn't XML not JSON, failing")
- raise DeserializationError("XML is invalid") from err
- elif content_type.startswith("text/"):
- return data_as_str
- raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
-
- @classmethod
- def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
- """Deserialize from HTTP response.
-
- Use bytes and headers to NOT use any requests/aiohttp or whatever
- specific implementation.
-        Headers will be tested for "content-type".
-
- :param bytes body_bytes: The body of the response.
- :param dict headers: The headers of the response.
- :returns: The deserialized data.
- :rtype: object
- """
- # Try to use content-type from headers if available
- content_type = None
- if "content-type" in headers:
- content_type = headers["content-type"].split(";")[0].strip().lower()
- # Ouch, this server did not declare what it sent...
- # Let's guess it's JSON...
- # Also, since Autorest was considering that an empty body was a valid JSON,
- # need that test as well....
- else:
- content_type = "application/json"
-
- if body_bytes:
- return cls.deserialize_from_text(body_bytes, content_type)
- return None
-
-
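# Illustrative sketch (not part of the generated code): how RawDeserializer above picks a
# decoder from the declared content type. The payload values here are made up for the example.
_example_payload = b'{"name": "demo"}'
assert RawDeserializer.deserialize_from_text(_example_payload, "application/json") == {"name": "demo"}
# "text/*" content types are returned as the decoded string, unparsed.
assert RawDeserializer.deserialize_from_text("plain body", "text/plain") == "plain body"
# With no content type at all, the raw input is returned untouched.
assert RawDeserializer.deserialize_from_text(_example_payload, None) == _example_payload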
-_LOGGER = logging.getLogger(__name__)
-
-try:
- _long_type = long # type: ignore
-except NameError:
- _long_type = int
-
-TZ_UTC = datetime.timezone.utc
-
-_FLATTEN = re.compile(r"(?<!\\)\.")
-
-
-class Model:
- """Mixin for all client request body/response body models to support
- serialization and deserialization.
- """
-
- _subtype_map: Dict[str, Dict[str, Any]] = {}
- _attribute_map: Dict[str, Dict[str, Any]] = {}
- _validation: Dict[str, Dict[str, Any]] = {}
-
- def __init__(self, **kwargs: Any) -> None:
- self.additional_properties: Optional[Dict[str, Any]] = {}
- for k in kwargs: # pylint: disable=consider-using-dict-items
- if k not in self._attribute_map:
- _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
- elif k in self._validation and self._validation[k].get("readonly", False):
- _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__)
- else:
- setattr(self, k, kwargs[k])
-
- def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes.
-
- :param object other: The object to compare
- :returns: True if objects are equal
- :rtype: bool
- """
- if isinstance(other, self.__class__):
- return self.__dict__ == other.__dict__
- return False
-
- def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes.
-
- :param object other: The object to compare
- :returns: True if objects are not equal
- :rtype: bool
- """
- return not self.__eq__(other)
-
- def __str__(self) -> str:
- return str(self.__dict__)
-
- @classmethod
- def enable_additional_properties_sending(cls) -> None:
- cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}
-
- @classmethod
- def is_xml_model(cls) -> bool:
- try:
- cls._xml_map # type: ignore
- except AttributeError:
- return False
- return True
-
- @classmethod
- def _create_xml_node(cls):
- """Create XML node.
-
- :returns: The XML node
- :rtype: xml.etree.ElementTree.Element
- """
- try:
- xml_map = cls._xml_map # type: ignore
- except AttributeError:
- xml_map = {}
-
- return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
-
- def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
- """Return the JSON that would be sent to server from this model.
-
- This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
-
- If you want XML serialization, you can pass the kwargs is_xml=True.
-
- :param bool keep_readonly: If you want to serialize the readonly attributes
- :returns: A dict JSON compatible object
- :rtype: dict
- """
- serializer = Serializer(self._infer_class_models())
- return serializer._serialize( # type: ignore # pylint: disable=protected-access
- self, keep_readonly=keep_readonly, **kwargs
- )
-
- def as_dict(
- self,
- keep_readonly: bool = True,
- key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer,
- **kwargs: Any
- ) -> JSON:
- """Return a dict that can be serialized using json.dump.
-
- Advanced usage might optionally use a callback as parameter:
-
- .. code:: python
-
- def my_key_transformer(key, attr_desc, value):
- return key, value
-
- Key is the attribute name used in Python. Attr_desc
- is a dict of metadata. Currently contains 'type' with the
- msrest type and 'key' with the RestAPI encoded key.
- Value is the current value in this object.
-
- The key returned is used to serialize the attribute; if a list of keys is
- returned, the value is nested under them, producing a hierarchical
- result dict (see the sketch after this method).
-
- See the three examples in this file:
-
- - attribute_transformer
- - full_restapi_key_transformer
- - last_restapi_key_transformer
-
- If you want XML serialization, you can pass the kwargs is_xml=True.
-
- :param bool keep_readonly: If you want to serialize the readonly attributes
- :param function key_transformer: A key transformer function.
- :returns: A dict JSON compatible object
- :rtype: dict
- """
- serializer = Serializer(self._infer_class_models())
- return serializer._serialize( # type: ignore # pylint: disable=protected-access
- self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
- )
-
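# Illustrative sketch (not part of the generated code): a hypothetical key transformer for
# as_dict(). Per the unpacking in Serializer._serialize below, a transformer returns a
# (key, value) pair; returning a list of keys nests the value under each key in turn.
def _example_upper_key_transformer(key, attr_desc, value):
    # 'key' is the Python attribute name; attr_desc carries the 'key'/'type' metadata.
    return key.upper(), value
# A model instance would then be flattened with upper-cased keys:
# my_model.as_dict(key_transformer=_example_upper_key_transformer)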
- @classmethod
- def _infer_class_models(cls):
- try:
- str_models = cls.__module__.rsplit(".", 1)[0]
- models = sys.modules[str_models]
- client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
- if cls.__name__ not in client_models:
- raise ValueError("Not Autorest generated code")
- except Exception: # pylint: disable=broad-exception-caught
- # Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
- client_models = {cls.__name__: cls}
- return client_models
-
- @classmethod
- def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self:
- """Parse a str using the RestAPI syntax and return a model.
-
- :param str data: A str using RestAPI structure. JSON by default.
- :param str content_type: JSON by default, set application/xml if XML.
- :returns: An instance of this model
- :raises DeserializationError: if something went wrong
- :rtype: Self
- """
- deserializer = Deserializer(cls._infer_class_models())
- return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
-
- @classmethod
- def from_dict(
- cls,
- data: Any,
- key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None,
- content_type: Optional[str] = None,
- ) -> Self:
- """Parse a dict using given key extractor return a model.
-
- By default, the key extractors used are attribute_key_case_insensitive_extractor,
- rest_key_case_insensitive_extractor and last_rest_key_case_insensitive_extractor
- (in that order).
-
- :param dict data: A dict using RestAPI structure
- :param function key_extractors: A key extractor function.
- :param str content_type: JSON by default, set application/xml if XML.
- :returns: An instance of this model
- :raises DeserializationError: if something went wrong
- :rtype: Self
- """
- deserializer = Deserializer(cls._infer_class_models())
- deserializer.key_extractors = ( # type: ignore
- [ # type: ignore
- attribute_key_case_insensitive_extractor,
- rest_key_case_insensitive_extractor,
- last_rest_key_case_insensitive_extractor,
- ]
- if key_extractors is None
- else key_extractors
- )
- return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
-
- @classmethod
- def _flatten_subtype(cls, key, objects):
- if "_subtype_map" not in cls.__dict__:
- return {}
- result = dict(cls._subtype_map[key])
- for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
- return result
-
- @classmethod
- def _classify(cls, response, objects):
- """Check the class _subtype_map for any child classes.
- We want to ignore any inherited _subtype_maps.
-
- :param dict response: The initial data
- :param dict objects: The class objects
- :returns: The class to be used
- :rtype: class
- """
- for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
- subtype_value = None
-
- if not isinstance(response, ET.Element):
- rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
- else:
- subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
- if subtype_value:
- # Try to match base class. Can be class name only
- # (bug to fix in Autorest to support x-ms-discriminator-name)
- if cls.__name__ == subtype_value:
- return cls
- flatten_mapping_type = cls._flatten_subtype(subtype_key, objects)
- try:
- return objects[flatten_mapping_type[subtype_value]] # type: ignore
- except KeyError:
- _LOGGER.warning(
- "Subtype value %s has no mapping, use base class %s.",
- subtype_value,
- cls.__name__,
- )
- break
- else:
- _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__)
- break
- return cls
-
- @classmethod
- def _get_rest_key_parts(cls, attr_key):
- """Get the RestAPI key of this attr, split it and decode part
- :param str attr_key: Attribute key must be in attribute_map.
- :returns: A list of RestAPI part
- :rtype: list
- """
- rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"])
- return [_decode_attribute_map_key(key_part) for key_part in rest_split_key]
-
-
-def _decode_attribute_map_key(key):
- """This decode a key in an _attribute_map to the actual key we want to look at
- inside the received data.
-
- :param str key: A key string from the generated code
- :returns: The decoded key
- :rtype: str
- """
- return key.replace("\\.", ".")
-
-
-class Serializer: # pylint: disable=too-many-public-methods
- """Request object model serializer."""
-
- basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
-
- _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()}
- days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"}
- months = {
- 1: "Jan",
- 2: "Feb",
- 3: "Mar",
- 4: "Apr",
- 5: "May",
- 6: "Jun",
- 7: "Jul",
- 8: "Aug",
- 9: "Sep",
- 10: "Oct",
- 11: "Nov",
- 12: "Dec",
- }
- validation = {
- "min_length": lambda x, y: len(x) < y,
- "max_length": lambda x, y: len(x) > y,
- "minimum": lambda x, y: x < y,
- "maximum": lambda x, y: x > y,
- "minimum_ex": lambda x, y: x <= y,
- "maximum_ex": lambda x, y: x >= y,
- "min_items": lambda x, y: len(x) < y,
- "max_items": lambda x, y: len(x) > y,
- "pattern": lambda x, y: not re.match(y, x, re.UNICODE),
- "unique": lambda x, y: len(x) != len(set(x)),
- "multiple": lambda x, y: x % y != 0,
- }
-
- def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
- self.serialize_type = {
- "iso-8601": Serializer.serialize_iso,
- "rfc-1123": Serializer.serialize_rfc,
- "unix-time": Serializer.serialize_unix,
- "duration": Serializer.serialize_duration,
- "date": Serializer.serialize_date,
- "time": Serializer.serialize_time,
- "decimal": Serializer.serialize_decimal,
- "long": Serializer.serialize_long,
- "bytearray": Serializer.serialize_bytearray,
- "base64": Serializer.serialize_base64,
- "object": self.serialize_object,
- "[]": self.serialize_iter,
- "{}": self.serialize_dict,
- }
- self.dependencies: Dict[str, type] = dict(classes) if classes else {}
- self.key_transformer = full_restapi_key_transformer
- self.client_side_validation = True
-
- def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
- self, target_obj, data_type=None, **kwargs
- ):
- """Serialize data into a string according to type.
-
- :param object target_obj: The data to be serialized.
- :param str data_type: The type to be serialized from.
- :rtype: str, dict
- :raises SerializationError: if serialization fails.
- :returns: The serialized data.
- """
- key_transformer = kwargs.get("key_transformer", self.key_transformer)
- keep_readonly = kwargs.get("keep_readonly", False)
- if target_obj is None:
- return None
-
- attr_name = None
- class_name = target_obj.__class__.__name__
-
- if data_type:
- return self.serialize_data(target_obj, data_type, **kwargs)
-
- if not hasattr(target_obj, "_attribute_map"):
- data_type = type(target_obj).__name__
- if data_type in self.basic_types.values():
- return self.serialize_data(target_obj, data_type, **kwargs)
-
- # Force "is_xml" kwargs if we detect a XML model
- try:
- is_xml_model_serialization = kwargs["is_xml"]
- except KeyError:
- is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model())
-
- serialized = {}
- if is_xml_model_serialization:
- serialized = target_obj._create_xml_node() # pylint: disable=protected-access
- try:
- attributes = target_obj._attribute_map # pylint: disable=protected-access
- for attr, attr_desc in attributes.items():
- attr_name = attr
- if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
- attr_name, {}
- ).get("readonly", False):
- continue
-
- if attr_name == "additional_properties" and attr_desc["key"] == "":
- if target_obj.additional_properties is not None:
- serialized.update(target_obj.additional_properties)
- continue
- try:
-
- orig_attr = getattr(target_obj, attr)
- if is_xml_model_serialization:
- pass # Don't provide "transformer" for XML for now. Keep "orig_attr"
- else: # JSON
- keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr)
- keys = keys if isinstance(keys, list) else [keys]
-
- kwargs["serialization_ctxt"] = attr_desc
- new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs)
-
- if is_xml_model_serialization:
- xml_desc = attr_desc.get("xml", {})
- xml_name = xml_desc.get("name", attr_desc["key"])
- xml_prefix = xml_desc.get("prefix", None)
- xml_ns = xml_desc.get("ns", None)
- if xml_desc.get("attr", False):
- if xml_ns:
- ET.register_namespace(xml_prefix, xml_ns)
- xml_name = "{{{}}}{}".format(xml_ns, xml_name)
- serialized.set(xml_name, new_attr) # type: ignore
- continue
- if xml_desc.get("text", False):
- serialized.text = new_attr # type: ignore
- continue
- if isinstance(new_attr, list):
- serialized.extend(new_attr) # type: ignore
- elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name,
- # we MUST replace the tag with the local tag. But keeping the namespaces.
- if "name" not in getattr(orig_attr, "_xml_map", {}):
- splitted_tag = new_attr.tag.split("}")
- if len(splitted_tag) == 2: # Namespace
- new_attr.tag = "}".join([splitted_tag[0], xml_name])
- else:
- new_attr.tag = xml_name
- serialized.append(new_attr) # type: ignore
- else: # That's a basic type
- # Integrate namespace if necessary
- local_node = _create_xml_node(xml_name, xml_prefix, xml_ns)
- local_node.text = str(new_attr)
- serialized.append(local_node) # type: ignore
- else: # JSON
- for k in reversed(keys): # type: ignore
- new_attr = {k: new_attr}
-
- _new_attr = new_attr
- _serialized = serialized
- for k in keys: # type: ignore
- if k not in _serialized:
- _serialized.update(_new_attr) # type: ignore
- _new_attr = _new_attr[k] # type: ignore
- _serialized = _serialized[k]
- except ValueError as err:
- if isinstance(err, SerializationError):
- raise
-
- except (AttributeError, KeyError, TypeError) as err:
- msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
- raise SerializationError(msg) from err
- return serialized
-
- def body(self, data, data_type, **kwargs):
- """Serialize data intended for a request body.
-
- :param object data: The data to be serialized.
- :param str data_type: The type to be serialized from.
- :rtype: dict
- :raises SerializationError: if serialization fails.
- :raises ValueError: if data is None
- :returns: The serialized request body
- """
-
- # Just in case this is a dict
- internal_data_type_str = data_type.strip("[]{}")
- internal_data_type = self.dependencies.get(internal_data_type_str, None)
- try:
- is_xml_model_serialization = kwargs["is_xml"]
- except KeyError:
- if internal_data_type and issubclass(internal_data_type, Model):
- is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model())
- else:
- is_xml_model_serialization = False
- if internal_data_type and not isinstance(internal_data_type, Enum):
- try:
- deserializer = Deserializer(self.dependencies)
- # Since it's on serialization, it's almost sure that format is not JSON REST
- # We're not able to deal with additional properties for now.
- deserializer.additional_properties_detection = False
- if is_xml_model_serialization:
- deserializer.key_extractors = [ # type: ignore
- attribute_key_case_insensitive_extractor,
- ]
- else:
- deserializer.key_extractors = [
- rest_key_case_insensitive_extractor,
- attribute_key_case_insensitive_extractor,
- last_rest_key_case_insensitive_extractor,
- ]
- data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
- except DeserializationError as err:
- raise SerializationError("Unable to build a model: " + str(err)) from err
-
- return self._serialize(data, data_type, **kwargs)
-
- def url(self, name, data, data_type, **kwargs):
- """Serialize data intended for a URL path.
-
- :param str name: The name of the URL path parameter.
- :param object data: The data to be serialized.
- :param str data_type: The type to be serialized from.
- :rtype: str
- :returns: The serialized URL path
- :raises TypeError: if serialization fails.
- :raises ValueError: if data is None
- """
- try:
- output = self.serialize_data(data, data_type, **kwargs)
- if data_type == "bool":
- output = json.dumps(output)
-
- if kwargs.get("skip_quote") is True:
- output = str(output)
- output = output.replace("{", quote("{")).replace("}", quote("}"))
- else:
- output = quote(str(output), safe="")
- except SerializationError as exc:
- raise TypeError("{} must be type {}.".format(name, data_type)) from exc
- return output
-
- def query(self, name, data, data_type, **kwargs):
- """Serialize data intended for a URL query.
-
- :param str name: The name of the query parameter.
- :param object data: The data to be serialized.
- :param str data_type: The type to be serialized from.
- :rtype: str, list
- :raises TypeError: if serialization fails.
- :raises ValueError: if data is None
- :returns: The serialized query parameter
- """
- try:
- # Treat the list aside, since we don't want to encode the div separator
- if data_type.startswith("["):
- internal_data_type = data_type[1:-1]
- do_quote = not kwargs.get("skip_quote", False)
- return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)
-
- # Not a list, regular serialization
- output = self.serialize_data(data, data_type, **kwargs)
- if data_type == "bool":
- output = json.dumps(output)
- if kwargs.get("skip_quote") is True:
- output = str(output)
- else:
- output = quote(str(output), safe="")
- except SerializationError as exc:
- raise TypeError("{} must be type {}.".format(name, data_type)) from exc
- return str(output)
-
- def header(self, name, data, data_type, **kwargs):
- """Serialize data intended for a request header.
-
- :param str name: The name of the header.
- :param object data: The data to be serialized.
- :param str data_type: The type to be serialized from.
- :rtype: str
- :raises TypeError: if serialization fails.
- :raises ValueError: if data is None
- :returns: The serialized header
- """
- try:
- if data_type in ["[str]"]:
- data = ["" if d is None else d for d in data]
-
- output = self.serialize_data(data, data_type, **kwargs)
- if data_type == "bool":
- output = json.dumps(output)
- except SerializationError as exc:
- raise TypeError("{} must be type {}.".format(name, data_type)) from exc
- return str(output)
-
- def serialize_data(self, data, data_type, **kwargs):
- """Serialize generic data according to supplied data type.
-
- :param object data: The data to be serialized.
- :param str data_type: The type to be serialized from.
- :raises AttributeError: if required data is None.
- :raises ValueError: if data is None
- :raises SerializationError: if serialization fails.
- :returns: The serialized data.
- :rtype: str, int, float, bool, dict, list
- """
- if data is None:
- raise ValueError("No value for given attribute")
-
- try:
- if data is CoreNull:
- return None
- if data_type in self.basic_types.values():
- return self.serialize_basic(data, data_type, **kwargs)
-
- if data_type in self.serialize_type:
- return self.serialize_type[data_type](data, **kwargs)
-
- # If dependencies is empty, try with current data class
- # It has to be a subclass of Enum anyway
- enum_type = self.dependencies.get(data_type, data.__class__)
- if issubclass(enum_type, Enum):
- return Serializer.serialize_enum(data, enum_obj=enum_type)
-
- iter_type = data_type[0] + data_type[-1]
- if iter_type in self.serialize_type:
- return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs)
-
- except (ValueError, TypeError) as err:
- msg = "Unable to serialize value: {!r} as type: {!r}."
- raise SerializationError(msg.format(data, data_type)) from err
- return self._serialize(data, **kwargs)
-
- @classmethod
- def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
- custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
- if custom_serializer:
- return custom_serializer
- if kwargs.get("is_xml", False):
- return cls._xml_basic_types_serializers.get(data_type)
-
- @classmethod
- def serialize_basic(cls, data, data_type, **kwargs):
- """Serialize basic builting data type.
- Serializes objects to str, int, float or bool.
-
- Possible kwargs:
- - basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- - is_xml bool : If set, use xml_basic_types_serializers
-
- :param obj data: Object to be serialized.
- :param str data_type: Type of object in the iterable.
- :rtype: str, int, float, bool
- :return: serialized object
- """
- custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
- if custom_serializer:
- return custom_serializer(data)
- if data_type == "str":
- return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec # pylint: disable=eval-used
-
- @classmethod
- def serialize_unicode(cls, data):
- """Special handling for serializing unicode strings in Py2.
- Encode to UTF-8 if unicode, otherwise handle as a str.
-
- :param str data: Object to be serialized.
- :rtype: str
- :return: serialized object
- """
- try: # If I received an enum, return its value
- return data.value
- except AttributeError:
- pass
-
- try:
- if isinstance(data, unicode): # type: ignore
- # Don't change it, JSON and XML ElementTree are totally able
- # to serialize correctly u'' strings
- return data
- except NameError:
- return str(data)
- return str(data)
-
- def serialize_iter(self, data, iter_type, div=None, **kwargs):
- """Serialize iterable.
-
- Supported kwargs:
- - serialization_ctxt dict : The current entry of _attribute_map, or same format.
- serialization_ctxt['type'] should be same as data_type.
- - is_xml bool : If set, serialize as XML
-
- :param list data: Object to be serialized.
- :param str iter_type: Type of object in the iterable.
- :param str div: If set, this str will be used to combine the elements
- in the iterable into a combined string. Default is 'None'.
- :rtype: list, str
- :return: serialized iterable
- """
- if isinstance(data, str):
- raise SerializationError("Refuse str type as a valid iter type.")
-
- serialization_ctxt = kwargs.get("serialization_ctxt", {})
- is_xml = kwargs.get("is_xml", False)
-
- serialized = []
- for d in data:
- try:
- serialized.append(self.serialize_data(d, iter_type, **kwargs))
- except ValueError as err:
- if isinstance(err, SerializationError):
- raise
- serialized.append(None)
-
- if kwargs.get("do_quote", False):
- serialized = ["" if s is None else quote(str(s), safe="") for s in serialized]
-
- if div:
- serialized = ["" if s is None else str(s) for s in serialized]
- serialized = div.join(serialized)
-
- if "xml" in serialization_ctxt or is_xml:
- # XML serialization is more complicated
- xml_desc = serialization_ctxt.get("xml", {})
- xml_name = xml_desc.get("name")
- if not xml_name:
- xml_name = serialization_ctxt["key"]
-
- # Create a wrap node if necessary (use the fact that Element and list have "append")
- is_wrapped = xml_desc.get("wrapped", False)
- node_name = xml_desc.get("itemsName", xml_name)
- if is_wrapped:
- final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
- else:
- final_result = []
- # All list elements to "local_node"
- for el in serialized:
- if isinstance(el, ET.Element):
- el_node = el
- else:
- el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
- if el is not None: # Otherwise it writes "None" :-p
- el_node.text = str(el)
- final_result.append(el_node)
- return final_result
- return serialized
-
- def serialize_dict(self, attr, dict_type, **kwargs):
- """Serialize a dictionary of objects.
-
- :param dict attr: Object to be serialized.
- :param str dict_type: Type of object in the dictionary.
- :rtype: dict
- :return: serialized dictionary
- """
- serialization_ctxt = kwargs.get("serialization_ctxt", {})
- serialized = {}
- for key, value in attr.items():
- try:
- serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs)
- except ValueError as err:
- if isinstance(err, SerializationError):
- raise
- serialized[self.serialize_unicode(key)] = None
-
- if "xml" in serialization_ctxt:
- # XML serialization is more complicated
- xml_desc = serialization_ctxt["xml"]
- xml_name = xml_desc["name"]
-
- final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
- for key, value in serialized.items():
- ET.SubElement(final_result, key).text = value
- return final_result
-
- return serialized
-
- def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
- """Serialize a generic object.
- This will be handled as a dictionary. If object passed in is not
- a basic type (str, int, float, dict, list) it will simply be
- cast to str.
-
- :param dict attr: Object to be serialized.
- :rtype: dict or str
- :return: serialized object
- """
- if attr is None:
- return None
- if isinstance(attr, ET.Element):
- return attr
- obj_type = type(attr)
- if obj_type in self.basic_types:
- return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs)
- if obj_type is _long_type:
- return self.serialize_long(attr)
- if obj_type is str:
- return self.serialize_unicode(attr)
- if obj_type is datetime.datetime:
- return self.serialize_iso(attr)
- if obj_type is datetime.date:
- return self.serialize_date(attr)
- if obj_type is datetime.time:
- return self.serialize_time(attr)
- if obj_type is datetime.timedelta:
- return self.serialize_duration(attr)
- if obj_type is decimal.Decimal:
- return self.serialize_decimal(attr)
-
- # If it's a model or I know this dependency, serialize as a Model
- if obj_type in self.dependencies.values() or isinstance(attr, Model):
- return self._serialize(attr)
-
- if obj_type == dict:
- serialized = {}
- for key, value in attr.items():
- try:
- serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs)
- except ValueError:
- serialized[self.serialize_unicode(key)] = None
- return serialized
-
- if obj_type == list:
- serialized = []
- for obj in attr:
- try:
- serialized.append(self.serialize_object(obj, **kwargs))
- except ValueError:
- pass
- return serialized
- return str(attr)
-
- @staticmethod
- def serialize_enum(attr, enum_obj=None):
- try:
- result = attr.value
- except AttributeError:
- result = attr
- try:
- enum_obj(result) # type: ignore
- return result
- except ValueError as exc:
- for enum_value in enum_obj: # type: ignore
- if enum_value.value.lower() == str(attr).lower():
- return enum_value.value
- error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj)) from exc
-
- @staticmethod
- def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize bytearray into base-64 string.
-
- :param str attr: Object to be serialized.
- :rtype: str
- :return: serialized base64
- """
- return b64encode(attr).decode()
-
- @staticmethod
- def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize str into base-64 string.
-
- :param str attr: Object to be serialized.
- :rtype: str
- :return: serialized base64
- """
- encoded = b64encode(attr).decode("ascii")
- return encoded.strip("=").replace("+", "-").replace("/", "_")
-
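# Illustrative sketch (not part of the generated code): the "base64" type produces URL-safe
# base64 with the '=' padding stripped; Deserializer.deserialize_base64 further below
# restores the padding on the way back.
assert Serializer.serialize_base64(b"hello world") == "aGVsbG8gd29ybGQ"
assert Deserializer.deserialize_base64("aGVsbG8gd29ybGQ") == b"hello world"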
- @staticmethod
- def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize Decimal object to float.
-
- :param decimal attr: Object to be serialized.
- :rtype: float
- :return: serialized decimal
- """
- return float(attr)
-
- @staticmethod
- def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize long (Py2) or int (Py3).
-
- :param int attr: Object to be serialized.
- :rtype: int/long
- :return: serialized long
- """
- return _long_type(attr)
-
- @staticmethod
- def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize Date object into ISO-8601 formatted string.
-
- :param Date attr: Object to be serialized.
- :rtype: str
- :return: serialized date
- """
- if isinstance(attr, str):
- attr = isodate.parse_date(attr)
- t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day)
- return t
-
- @staticmethod
- def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize Time object into ISO-8601 formatted string.
-
- :param datetime.time attr: Object to be serialized.
- :rtype: str
- :return: serialized time
- """
- if isinstance(attr, str):
- attr = isodate.parse_time(attr)
- t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second)
- if attr.microsecond:
- t += ".{:02}".format(attr.microsecond)
- return t
-
- @staticmethod
- def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize TimeDelta object into ISO-8601 formatted string.
-
- :param TimeDelta attr: Object to be serialized.
- :rtype: str
- :return: serialized duration
- """
- if isinstance(attr, str):
- attr = isodate.parse_duration(attr)
- return isodate.duration_isoformat(attr)
-
- @staticmethod
- def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize Datetime object into RFC-1123 formatted string.
-
- :param Datetime attr: Object to be serialized.
- :rtype: str
- :raises TypeError: if format invalid.
- :return: serialized rfc
- """
- try:
- if not attr.tzinfo:
- _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
- utc = attr.utctimetuple()
- except AttributeError as exc:
- raise TypeError("RFC1123 object must be valid Datetime object.") from exc
-
- return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
- Serializer.days[utc.tm_wday],
- utc.tm_mday,
- Serializer.months[utc.tm_mon],
- utc.tm_year,
- utc.tm_hour,
- utc.tm_min,
- utc.tm_sec,
- )
-
- @staticmethod
- def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize Datetime object into ISO-8601 formatted string.
-
- :param Datetime attr: Object to be serialized.
- :rtype: str
- :raises SerializationError: if format invalid.
- :return: serialized iso
- """
- if isinstance(attr, str):
- attr = isodate.parse_datetime(attr)
- try:
- if not attr.tzinfo:
- _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
- utc = attr.utctimetuple()
- if utc.tm_year > 9999 or utc.tm_year < 1:
- raise OverflowError("Hit max or min date")
-
- microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
- if microseconds:
- microseconds = "." + microseconds
- date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
- utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
- )
- return date + microseconds + "Z"
- except (ValueError, OverflowError) as err:
- msg = "Unable to serialize datetime object."
- raise SerializationError(msg) from err
- except AttributeError as err:
- msg = "ISO-8601 object must be valid Datetime object."
- raise TypeError(msg) from err
-
- @staticmethod
- def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize Datetime object into IntTime format.
- This is represented as seconds.
-
- :param Datetime attr: Object to be serialized.
- :rtype: int
- :raises SerializationError: if format invalid
- :return: serialized unix time
- """
- if isinstance(attr, int):
- return attr
- try:
- if not attr.tzinfo:
- _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
- return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError as exc:
- raise TypeError("Unix time object must be valid Datetime object.") from exc
-
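# Illustrative sketch (not part of the generated code): the datetime serializers above in
# action on a timezone-aware value (1 January 2000, midnight UTC).
_example_dt = datetime.datetime(2000, 1, 1, tzinfo=datetime.timezone.utc)
assert Serializer.serialize_rfc(_example_dt) == "Sat, 01 Jan 2000 00:00:00 GMT"
assert Serializer.serialize_unix(_example_dt) == 946684800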
-
-def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
- key = attr_desc["key"]
- working_data = data
-
- while "." in key:
- # Need the cast, as for some reason "split" is typed as list[str | Any]
- dict_keys = cast(List[str], _FLATTEN.split(key))
- if len(dict_keys) == 1:
- key = _decode_attribute_map_key(dict_keys[0])
- break
- working_key = _decode_attribute_map_key(dict_keys[0])
- working_data = working_data.get(working_key, data)
- if working_data is None:
- # If at any point while following the flattened JSON path we see None, it means
- # that all properties underneath are None as well
- return None
- key = ".".join(dict_keys[1:])
-
- return working_data.get(key)
-
-
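# Illustrative sketch (not part of the generated code): rest_key_extractor follows the dotted
# 'key' path of a flattened attribute through nested JSON. The attribute description and the
# response dict below are made up for the example.
_example_attr_desc = {"key": "properties.provisioningState", "type": "str"}
_example_response = {"properties": {"provisioningState": "Succeeded"}}
assert rest_key_extractor("provisioning_state", _example_attr_desc, _example_response) == "Succeeded"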
-def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
- attr, attr_desc, data
-):
- key = attr_desc["key"]
- working_data = data
-
- while "." in key:
- dict_keys = _FLATTEN.split(key)
- if len(dict_keys) == 1:
- key = _decode_attribute_map_key(dict_keys[0])
- break
- working_key = _decode_attribute_map_key(dict_keys[0])
- working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data)
- if working_data is None:
- # If at any point while following the flattened JSON path we see None, it means
- # that all properties underneath are None as well
- return None
- key = ".".join(dict_keys[1:])
-
- if working_data:
- return attribute_key_case_insensitive_extractor(key, None, working_data)
-
-
-def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
- """Extract the attribute in "data" based on the last part of the JSON path key.
-
- :param str attr: The attribute to extract
- :param dict attr_desc: The attribute description
- :param dict data: The data to extract from
- :rtype: object
- :returns: The extracted attribute
- """
- key = attr_desc["key"]
- dict_keys = _FLATTEN.split(key)
- return attribute_key_extractor(dict_keys[-1], None, data)
-
-
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
- """Extract the attribute in "data" based on the last part of the JSON path key.
-
- This is the case insensitive version of "last_rest_key_extractor"
- :param str attr: The attribute to extract
- :param dict attr_desc: The attribute description
- :param dict data: The data to extract from
- :rtype: object
- :returns: The extracted attribute
- """
- key = attr_desc["key"]
- dict_keys = _FLATTEN.split(key)
- return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data)
-
-
-def attribute_key_extractor(attr, _, data):
- return data.get(attr)
-
-
-def attribute_key_case_insensitive_extractor(attr, _, data):
- found_key = None
- lower_attr = attr.lower()
- for key in data:
- if lower_attr == key.lower():
- found_key = key
- break
-
- return data.get(found_key)
-
-
-def _extract_name_from_internal_type(internal_type):
- """Given an internal type XML description, extract correct XML name with namespace.
-
- :param type internal_type: A model type
- :rtype: str
- :returns: The XML name, qualified with its namespace if one is declared
- """
- internal_type_xml_map = getattr(internal_type, "_xml_map", {})
- xml_name = internal_type_xml_map.get("name", internal_type.__name__)
- xml_ns = internal_type_xml_map.get("ns", None)
- if xml_ns:
- xml_name = "{{{}}}{}".format(xml_ns, xml_name)
- return xml_name
-
-
-def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
- if isinstance(data, dict):
- return None
-
- # Test if this model is XML ready first
- if not isinstance(data, ET.Element):
- return None
-
- xml_desc = attr_desc.get("xml", {})
- xml_name = xml_desc.get("name", attr_desc["key"])
-
- # Look for children
- is_iter_type = attr_desc["type"].startswith("[")
- is_wrapped = xml_desc.get("wrapped", False)
- internal_type = attr_desc.get("internalType", None)
- internal_type_xml_map = getattr(internal_type, "_xml_map", {})
-
- # Integrate namespace if necessary
- xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
- if xml_ns:
- xml_name = "{{{}}}{}".format(xml_ns, xml_name)
-
- # If it's an attribute, that's simple
- if xml_desc.get("attr", False):
- return data.get(xml_name)
-
- # If it's x-ms-text, that's simple too
- if xml_desc.get("text", False):
- return data.text
-
- # Scenario where I take the local name:
- # - Wrapped node
- # - Internal type is an enum (considered basic types)
- # - Internal type has no XML/Name node
- if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
- children = data.findall(xml_name)
- # If internal type has a local name and it's not a list, I use that name
- elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
- xml_name = _extract_name_from_internal_type(internal_type)
- children = data.findall(xml_name)
- # That's an array
- else:
- if internal_type: # Complex type, ignore itemsName and use the complex type name
- items_name = _extract_name_from_internal_type(internal_type)
- else:
- items_name = xml_desc.get("itemsName", xml_name)
- children = data.findall(items_name)
-
- if len(children) == 0:
- if is_iter_type:
- if is_wrapped:
- return None # is_wrapped no node, we want None
- return [] # not wrapped, assume empty list
- return None # Assume it's not there, maybe an optional node.
-
- # If is_iter_type and not wrapped, return all found children
- if is_iter_type:
- if not is_wrapped:
- return children
- # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
- )
- return list(children[0]) # Might be empty list and that's ok.
-
- # Here it's not a itertype, we should have found one element only or empty
- if len(children) > 1:
- raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name))
- return children[0]
-
-
-class Deserializer:
- """Response object model deserializer.
-
- :param dict classes: Class type dictionary for deserializing complex types.
- :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
- """
-
- basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
-
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
-
- def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
- self.deserialize_type = {
- "iso-8601": Deserializer.deserialize_iso,
- "rfc-1123": Deserializer.deserialize_rfc,
- "unix-time": Deserializer.deserialize_unix,
- "duration": Deserializer.deserialize_duration,
- "date": Deserializer.deserialize_date,
- "time": Deserializer.deserialize_time,
- "decimal": Deserializer.deserialize_decimal,
- "long": Deserializer.deserialize_long,
- "bytearray": Deserializer.deserialize_bytearray,
- "base64": Deserializer.deserialize_base64,
- "object": self.deserialize_object,
- "[]": self.deserialize_iter,
- "{}": self.deserialize_dict,
- }
- self.deserialize_expected_types = {
- "duration": (isodate.Duration, datetime.timedelta),
- "iso-8601": (datetime.datetime),
- }
- self.dependencies: Dict[str, type] = dict(classes) if classes else {}
- self.key_extractors = [rest_key_extractor, xml_key_extractor]
- # Additional properties only work if the "rest_key_extractor" is used to
- # extract the keys. Making it work regardless of the key extractor would be too
- # complicated, with no real scenario for now.
- # So adding a flag to disable additional properties detection. This flag should be
- # used if you expect the deserialization to NOT come from a JSON REST syntax.
- # Otherwise, results are unexpected.
- self.additional_properties_detection = True
-
- def __call__(self, target_obj, response_data, content_type=None):
- """Call the deserializer to process a REST response.
-
- :param str target_obj: Target data type to deserialize to.
- :param requests.Response response_data: REST response object.
- :param str content_type: Swagger "produces" if available.
- :raises DeserializationError: if deserialization fails.
- :return: Deserialized object.
- :rtype: object
- """
- data = self._unpack_content(response_data, content_type)
- return self._deserialize(target_obj, data)
-
- def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
- """Call the deserializer on a model.
-
- Data needs to be already deserialized as JSON or XML ElementTree
-
- :param str target_obj: Target data type to deserialize to.
- :param object data: Object to deserialize.
- :raises DeserializationError: if deserialization fails.
- :return: Deserialized object.
- :rtype: object
- """
- # This is already a model, go recursive just in case
- if hasattr(data, "_attribute_map"):
- constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
- try:
- for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
- if attr in constants:
- continue
- value = getattr(data, attr)
- if value is None:
- continue
- local_type = mapconfig["type"]
- internal_data_type = local_type.strip("[]{}")
- if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum):
- continue
- setattr(data, attr, self._deserialize(local_type, value))
- return data
- except AttributeError:
- return
-
- response, class_name = self._classify_target(target_obj, data)
-
- if isinstance(response, str):
- return self.deserialize_data(data, response)
- if isinstance(response, type) and issubclass(response, Enum):
- return self.deserialize_enum(data, response)
-
- if data is None or data is CoreNull:
- return data
- try:
- attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
- d_attrs = {}
- for attr, attr_desc in attributes.items():
- # Check empty string. If it's not empty, someone has a real "additionalProperties"...
- if attr == "additional_properties" and attr_desc["key"] == "":
- continue
- raw_value = None
- # Enhance attr_desc with some dynamic data
- attr_desc = attr_desc.copy() # Do a copy, do not change the real one
- internal_data_type = attr_desc["type"].strip("[]{}")
- if internal_data_type in self.dependencies:
- attr_desc["internalType"] = self.dependencies[internal_data_type]
-
- for key_extractor in self.key_extractors:
- found_value = key_extractor(attr, attr_desc, data)
- if found_value is not None:
- if raw_value is not None and raw_value != found_value:
- msg = (
- "Ignoring extracted value '%s' from %s for key '%s'"
- " (duplicate extraction, follow extractors order)"
- )
- _LOGGER.warning(msg, found_value, key_extractor, attr)
- continue
- raw_value = found_value
-
- value = self.deserialize_data(raw_value, attr_desc["type"])
- d_attrs[attr] = value
- except (AttributeError, TypeError, KeyError) as err:
- msg = "Unable to deserialize to object: " + class_name # type: ignore
- raise DeserializationError(msg) from err
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
-
- def _build_additional_properties(self, attribute_map, data):
- if not self.additional_properties_detection:
- return None
- if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "":
- # Check empty string. If it's not empty, someone has a real "additionalProperties"
- return None
- if isinstance(data, ET.Element):
- data = {el.tag: el.text for el in data}
-
- known_keys = {
- _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0])
- for desc in attribute_map.values()
- if desc["key"] != ""
- }
- present_keys = set(data.keys())
- missing_keys = present_keys - known_keys
- return {key: data[key] for key in missing_keys}
-
- def _classify_target(self, target, data):
- """Check to see whether the deserialization target object can
- be classified into a subclass.
- Once classification has been determined, initialize object.
-
- :param str target: The target object type to deserialize to.
- :param str/dict data: The response data to deserialize.
- :return: The classified target object and its class name.
- :rtype: tuple
- """
- if target is None:
- return None, None
-
- if isinstance(target, str):
- try:
- target = self.dependencies[target]
- except KeyError:
- return target, target
-
- try:
- target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
- except AttributeError:
- pass # Target is not a Model, no classify
- return target, target.__class__.__name__ # type: ignore
-
- def failsafe_deserialize(self, target_obj, data, content_type=None):
- """Ignores any errors encountered in deserialization,
- and falls back to not deserializing the object. Recommended
- for use in error deserialization, as we want to return the
- HttpResponseError to users, and not have them deal with
- a deserialization error.
-
- :param str target_obj: The target object type to deserialize to.
- :param str/dict data: The response data to deserialize.
- :param str content_type: Swagger "produces" if available.
- :return: Deserialized object.
- :rtype: object
- """
- try:
- return self(target_obj, data, content_type=content_type)
- except: # pylint: disable=bare-except
- _LOGGER.debug(
- "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
- )
- return None
-
- @staticmethod
- def _unpack_content(raw_data, content_type=None):
- """Extract the correct structure for deserialization.
-
- If raw_data is a PipelineResponse, try to extract the result of RawDeserializer.
- if we can't, raise. Your Pipeline should have a RawDeserializer.
-
- If not a pipeline response and raw_data is bytes or string, use content-type
- to decode it. If no content-type, try JSON.
-
- If raw_data is something else, bypass all logic and return it directly.
-
- :param obj raw_data: Data to be processed.
- :param str content_type: How to parse if raw_data is a string/bytes.
- :raises JSONDecodeError: If JSON is requested and parsing is impossible.
- :raises UnicodeDecodeError: If bytes is not UTF8
- :rtype: object
- :return: Unpacked content.
- """
- # Assume this is enough to detect a Pipeline Response without importing it
- context = getattr(raw_data, "context", {})
- if context:
- if RawDeserializer.CONTEXT_NAME in context:
- return context[RawDeserializer.CONTEXT_NAME]
- raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize")
-
- # Assume this is enough to recognize universal_http.ClientResponse without importing it
- if hasattr(raw_data, "body"):
- return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers)
-
- # Assume this is enough to recognize requests.Response without importing it.
- if hasattr(raw_data, "_content_consumed"):
- return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
-
- if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
- return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore
- return raw_data
-
- def _instantiate_model(self, response, attrs, additional_properties=None):
- """Instantiate a response model passing in deserialized args.
-
- :param Response response: The response model class.
- :param dict attrs: The deserialized response attributes.
- :param dict additional_properties: Additional properties to be set.
- :rtype: Response
- :return: The instantiated response model.
- """
- if callable(response):
- subtype = getattr(response, "_subtype_map", {})
- try:
- readonly = [
- k
- for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
- if v.get("readonly")
- ]
- const = [
- k
- for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
- if v.get("constant")
- ]
- kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
- response_obj = response(**kwargs)
- for attr in readonly:
- setattr(response_obj, attr, attrs.get(attr))
- if additional_properties:
- response_obj.additional_properties = additional_properties # type: ignore
- return response_obj
- except TypeError as err:
- msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err)) from err
- else:
- try:
- for attr, value in attrs.items():
- setattr(response, attr, value)
- return response
- except Exception as exp:
- msg = "Unable to populate response model. "
- msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg) from exp
-
- def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
- """Process data for deserialization according to data type.
-
- :param str data: The response string to be deserialized.
- :param str data_type: The type to deserialize to.
- :raises DeserializationError: if deserialization fails.
- :return: Deserialized object.
- :rtype: object
- """
- if data is None:
- return data
-
- try:
- if not data_type:
- return data
- if data_type in self.basic_types.values():
- return self.deserialize_basic(data, data_type)
- if data_type in self.deserialize_type:
- if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
- return data
-
- is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
- "object",
- "[]",
- r"{}",
- ]
- if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
- return None
- data_val = self.deserialize_type[data_type](data)
- return data_val
-
- iter_type = data_type[0] + data_type[-1]
- if iter_type in self.deserialize_type:
- return self.deserialize_type[iter_type](data, data_type[1:-1])
-
- obj_type = self.dependencies[data_type]
- if issubclass(obj_type, Enum):
- if isinstance(data, ET.Element):
- data = data.text
- return self.deserialize_enum(data, obj_type)
-
- except (ValueError, TypeError, AttributeError) as err:
- msg = "Unable to deserialize response data."
- msg += " Data: {}, {}".format(data, data_type)
- raise DeserializationError(msg) from err
- return self._deserialize(obj_type, data)
-
- def deserialize_iter(self, attr, iter_type):
- """Deserialize an iterable.
-
- :param list attr: Iterable to be deserialized.
- :param str iter_type: The type of object in the iterable.
- :return: Deserialized iterable.
- :rtype: list
- """
- if attr is None:
- return None
- if isinstance(attr, ET.Element): # If I receive an element here, get the children
- attr = list(attr)
- if not isinstance(attr, (list, set)):
- raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr)))
- return [self.deserialize_data(a, iter_type) for a in attr]
-
- def deserialize_dict(self, attr, dict_type):
- """Deserialize a dictionary.
-
- :param dict/list attr: Dictionary to be deserialized. Also accepts
- a list of key, value pairs.
- :param str dict_type: The object type of the items in the dictionary.
- :return: Deserialized dictionary.
- :rtype: dict
- """
- if isinstance(attr, list):
- return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr}
-
- if isinstance(attr, ET.Element):
- # Transform value into {"Key": "value"}
- attr = {el.tag: el.text for el in attr}
- return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
-
- def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
- """Deserialize a generic object.
- This will be handled as a dictionary.
-
- :param dict attr: Dictionary to be deserialized.
- :return: Deserialized object.
- :rtype: dict
- :raises TypeError: if non-builtin datatype encountered.
- """
- if attr is None:
- return None
- if isinstance(attr, ET.Element):
- # Do not recurse on XML, just return the tree as-is
- return attr
- if isinstance(attr, str):
- return self.deserialize_basic(attr, "str")
- obj_type = type(attr)
- if obj_type in self.basic_types:
- return self.deserialize_basic(attr, self.basic_types[obj_type])
- if obj_type is _long_type:
- return self.deserialize_long(attr)
-
- if obj_type == dict:
- deserialized = {}
- for key, value in attr.items():
- try:
- deserialized[key] = self.deserialize_object(value, **kwargs)
- except ValueError:
- deserialized[key] = None
- return deserialized
-
- if obj_type == list:
- deserialized = []
- for obj in attr:
- try:
- deserialized.append(self.deserialize_object(obj, **kwargs))
- except ValueError:
- pass
- return deserialized
-
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
-
- def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
- """Deserialize basic builtin data type from string.
- Will attempt to convert to str, int, float and bool.
- This function will also accept '1', '0', 'true' and 'false' as
- valid bool values.
-
- :param str attr: response string to be deserialized.
- :param str data_type: deserialization data type.
- :return: Deserialized basic type.
- :rtype: str, int, float or bool
- :raises TypeError: if string format is not valid.
- """
- # If we're here, data is supposed to be a basic type.
- # If it's still an XML node, take the text
- if isinstance(attr, ET.Element):
- attr = attr.text
- if not attr:
- if data_type == "str":
- # None or '', node is empty string.
- return ""
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
-
- if data_type == "bool":
- if attr in [True, False, 1, 0]:
- return bool(attr)
- if isinstance(attr, str):
- if attr.lower() in ["true", "1"]:
- return True
- if attr.lower() in ["false", "0"]:
- return False
- raise TypeError("Invalid boolean value: {}".format(attr))
-
- if data_type == "str":
- return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec # pylint: disable=eval-used
-
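# Illustrative sketch (not part of the generated code): deserialize_basic accepts the common
# textual spellings of booleans and coerces numeric strings.
_example_deserializer = Deserializer()
assert _example_deserializer.deserialize_basic("true", "bool") is True
assert _example_deserializer.deserialize_basic("0", "bool") is False
assert _example_deserializer.deserialize_basic("42", "int") == 42
assert _example_deserializer.deserialize_basic("", "str") == ""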
- @staticmethod
- def deserialize_unicode(data):
- """Preserve unicode objects in Python 2, otherwise return data
- as a string.
-
- :param str data: response string to be deserialized.
- :return: Deserialized string.
- :rtype: str or unicode
- """
- # We might be here because we have an enum modeled as string,
- # and we try to deserialize a partial dict with enum inside
- if isinstance(data, Enum):
- return data
-
- # Consider this is real string
- try:
- if isinstance(data, unicode): # type: ignore
- return data
- except NameError:
- return str(data)
- return str(data)
-
- @staticmethod
- def deserialize_enum(data, enum_obj):
- """Deserialize string into enum object.
-
- If the string is not a valid enum value it will be returned as-is
- and a warning will be logged.
-
- :param str data: Response string to be deserialized. If this value is
- None or invalid it will be returned as-is.
- :param Enum enum_obj: Enum object to deserialize to.
- :return: Deserialized enum object.
- :rtype: Enum
- """
- if isinstance(data, enum_obj) or data is None:
- return data
- if isinstance(data, Enum):
- data = data.value
- if isinstance(data, int):
- # Workaround. We might consider removing it in the future.
- try:
- return list(enum_obj.__members__.values())[data]
- except IndexError as exc:
- error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj)) from exc
- try:
- return enum_obj(str(data))
- except ValueError:
- for enum_value in enum_obj:
- if enum_value.value.lower() == str(data).lower():
- return enum_value
- # We don't fail anymore for unknown value, we deserialize as a string
- _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj)
- return Deserializer.deserialize_unicode(data)
-
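# Illustrative sketch (not part of the generated code): enum deserialization is
# case-insensitive, and unknown values fall back to a plain string instead of raising.
class _ExampleColor(Enum):
    RED = "red"

assert Deserializer.deserialize_enum("RED", _ExampleColor) is _ExampleColor.RED
assert Deserializer.deserialize_enum("purple", _ExampleColor) == "purple"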
- @staticmethod
- def deserialize_bytearray(attr):
- """Deserialize string into bytearray.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized bytearray
- :rtype: bytearray
- :raises TypeError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- return bytearray(b64decode(attr)) # type: ignore
-
- @staticmethod
- def deserialize_base64(attr):
- """Deserialize base64 encoded string into string.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized base64 string
- :rtype: bytearray
- :raises TypeError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore
- attr = attr + padding # type: ignore
- encoded = attr.replace("-", "+").replace("_", "/")
- return b64decode(encoded)
-
- @staticmethod
- def deserialize_decimal(attr):
- """Deserialize string into Decimal object.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized decimal
- :raises DeserializationError: if string format invalid.
- :rtype: decimal
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- try:
- return decimal.Decimal(str(attr)) # type: ignore
- except decimal.DecimalException as err:
- msg = "Invalid decimal {}".format(attr)
- raise DeserializationError(msg) from err
-
- @staticmethod
- def deserialize_long(attr):
- """Deserialize string into long (Py2) or int (Py3).
-
- :param str attr: response string to be deserialized.
- :return: Deserialized int
- :rtype: long or int
- :raises ValueError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- return _long_type(attr) # type: ignore
-
- @staticmethod
- def deserialize_duration(attr):
- """Deserialize ISO-8601 formatted string into TimeDelta object.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized duration
- :rtype: TimeDelta
- :raises DeserializationError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- try:
- duration = isodate.parse_duration(attr)
- except (ValueError, OverflowError, AttributeError) as err:
- msg = "Cannot deserialize duration object."
- raise DeserializationError(msg) from err
- return duration
-
- @staticmethod
- def deserialize_date(attr):
- """Deserialize ISO-8601 formatted string into Date object.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized date
- :rtype: Date
- :raises DeserializationError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
- raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
- # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
- return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
-
- @staticmethod
- def deserialize_time(attr):
- """Deserialize ISO-8601 formatted string into time object.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized time
- :rtype: datetime.time
- :raises DeserializationError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
- raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
- return isodate.parse_time(attr)
-
- @staticmethod
- def deserialize_rfc(attr):
- """Deserialize RFC-1123 formatted string into Datetime object.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized RFC datetime
- :rtype: Datetime
- :raises DeserializationError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- try:
- parsed_date = email.utils.parsedate_tz(attr) # type: ignore
- date_obj = datetime.datetime(
- *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
- )
- if not date_obj.tzinfo:
- date_obj = date_obj.astimezone(tz=TZ_UTC)
- except ValueError as err:
- msg = "Cannot deserialize to rfc datetime object."
- raise DeserializationError(msg) from err
- return date_obj
-
- @staticmethod
- def deserialize_iso(attr):
- """Deserialize ISO-8601 formatted string into Datetime object.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized ISO datetime
- :rtype: Datetime
- :raises DeserializationError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- try:
- attr = attr.upper() # type: ignore
- match = Deserializer.valid_date.match(attr)
- if not match:
- raise ValueError("Invalid datetime string: " + attr)
-
- check_decimal = attr.split(".")
- if len(check_decimal) > 1:
- decimal_str = ""
- for digit in check_decimal[1]:
- if digit.isdigit():
- decimal_str += digit
- else:
- break
- if len(decimal_str) > 6:
- attr = attr.replace(decimal_str, decimal_str[0:6])
-
- date_obj = isodate.parse_datetime(attr)
- test_utc = date_obj.utctimetuple()
- if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
- raise OverflowError("Hit max or min date")
- except (ValueError, OverflowError, AttributeError) as err:
- msg = "Cannot deserialize datetime object."
- raise DeserializationError(msg) from err
- return date_obj
-
- @staticmethod
- def deserialize_unix(attr):
- """Serialize Datetime object into IntTime format.
- This is represented as seconds.
-
- :param int attr: Object to be serialized.
- :return: Deserialized datetime
- :rtype: Datetime
- :raises DeserializationError: if format invalid
- """
- if isinstance(attr, ET.Element):
- attr = int(attr.text) # type: ignore
- try:
- attr = int(attr)
- date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
- except ValueError as err:
- msg = "Cannot deserialize to unix datetime object."
- raise DeserializationError(msg) from err
- return date_obj
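The msrest-style deserializers removed above truncate fractional seconds to microsecond precision before handing ISO-8601 strings to isodate. A minimal standalone sketch of that truncation step, assuming only the isodate dependency this module already uses:

import isodate

attr = "2026-02-01T12:00:00.1234567890Z".upper()
parts = attr.split(".")
if len(parts) > 1:
    digits = ""
    for ch in parts[1]:
        if not ch.isdigit():
            break
        digits += ch
    if len(digits) > 6:
        # keep at most 6 fractional digits, as deserialize_iso did
        attr = attr.replace(digits, digits[:6])
print(isodate.parse_datetime(attr))  # 2026-02-01 12:00:00.123456+00:00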
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_utils/model_base.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_utils/model_base.py
index 12926fa98dcf..b4433021b4e5 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_utils/model_base.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_utils/model_base.py
@@ -37,6 +37,7 @@
TZ_UTC = timezone.utc
_T = typing.TypeVar("_T")
+_NONE_TYPE = type(None)
def _timedelta_as_isostr(td: timedelta) -> str:
@@ -171,6 +172,21 @@ def default(self, o): # pylint: disable=too-many-return-statements
r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT"
)
+_ARRAY_ENCODE_MAPPING = {
+ "pipeDelimited": "|",
+ "spaceDelimited": " ",
+ "commaDelimited": ",",
+ "newlineDelimited": "\n",
+}
+
+
+def _deserialize_array_encoded(delimit: str, attr):
+ if isinstance(attr, str):
+ if attr == "":
+ return []
+ return attr.split(delimit)
+ return attr
+
def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
"""Deserialize ISO-8601 formatted string into Datetime object.
@@ -202,7 +218,7 @@ def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
test_utc = date_obj.utctimetuple()
if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
raise OverflowError("Hit max or min date")
- return date_obj
+ return date_obj # type: ignore[no-any-return]
def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime:
@@ -256,7 +272,7 @@ def _deserialize_time(attr: typing.Union[str, time]) -> time:
"""
if isinstance(attr, time):
return attr
- return isodate.parse_time(attr)
+ return isodate.parse_time(attr) # type: ignore[no-any-return]
def _deserialize_bytes(attr):
@@ -315,6 +331,8 @@ def _deserialize_int_as_str(attr):
def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None):
if annotation is int and rf and rf._format == "str":
return _deserialize_int_as_str
+ if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING:
+ return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format])
if rf and rf._format:
return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format)
return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore
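A minimal sketch of the delimiter round trip that the new _ARRAY_ENCODE_MAPPING and _deserialize_array_encoded helpers implement; this is standalone illustration, not SDK code:

delimiter = "|"  # _ARRAY_ENCODE_MAPPING["pipeDelimited"]

def split_encoded(attr):
    # mirrors _deserialize_array_encoded: an empty string must become [], not [""]
    if isinstance(attr, str):
        return [] if attr == "" else attr.split(delimiter)
    return attr

assert split_encoded("a|b|c") == ["a", "b", "c"]
assert split_encoded("") == []                     # "".split("|") would give [""]
assert delimiter.join(["a", "b", "c"]) == "a|b|c"  # the _serialize direction for list[str]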
@@ -353,9 +371,39 @@ def __contains__(self, key: typing.Any) -> bool:
return key in self._data
def __getitem__(self, key: str) -> typing.Any:
+ # If this key has been deserialized (for mutable types), we need to handle serialization
+ if hasattr(self, "_attr_to_rest_field"):
+ cache_attr = f"_deserialized_{key}"
+ if hasattr(self, cache_attr):
+ rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key)
+ if rf:
+ value = self._data.get(key)
+ if isinstance(value, (dict, list, set)):
+ # For mutable types, serialize and return
+ # But also update _data with serialized form and clear flag
+ # so mutations via this returned value affect _data
+ serialized = _serialize(value, rf._format)
+ # If serialized form is same type (no transformation needed),
+ # return _data directly so mutations work
+ if isinstance(serialized, type(value)) and serialized == value:
+ return self._data.get(key)
+ # Otherwise return serialized copy and clear flag
+ try:
+ object.__delattr__(self, cache_attr)
+ except AttributeError:
+ pass
+ # Store serialized form back
+ self._data[key] = serialized
+ return serialized
return self._data.__getitem__(key)
def __setitem__(self, key: str, value: typing.Any) -> None:
+ # Clear any cached deserialized value when setting through dictionary access
+ cache_attr = f"_deserialized_{key}"
+ try:
+ object.__delattr__(self, cache_attr)
+ except AttributeError:
+ pass
self._data.__setitem__(key, value)
def __delitem__(self, key: str) -> None:
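The caching above makes a mutable value read through an attribute live in _data, so in-place edits persist and dictionary-style writes invalidate the cache. A hedged sketch of the intended behaviour; the Scheduler model and its tags mapping are used only as a plausible example of a generated model:

# scheduler = Scheduler({"tags": {"env": "dev"}})
# scheduler.tags["owner"] = "team-a"        # first read stores the deserialized dict back in _data
# assert scheduler["tags"]["owner"] == "team-a"
# scheduler["tags"] = {"env": "prod"}       # __setitem__ clears the _deserialized_tags flag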
@@ -467,6 +515,8 @@ def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
return self._data.setdefault(key, default)
def __eq__(self, other: typing.Any) -> bool:
+ if isinstance(other, _MyMutableMapping):
+ return self._data == other._data
try:
other_model = self.__class__(other)
except Exception:
@@ -483,6 +533,8 @@ def _is_model(obj: typing.Any) -> bool:
def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements
if isinstance(o, list):
+ if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o):
+ return _ARRAY_ENCODE_MAPPING[format].join(o)
return [_serialize(x, format) for x in o]
if isinstance(o, dict):
return {k: _serialize(v, format) for k, v in o.items()}
@@ -758,6 +810,14 @@ def _deserialize_multiple_sequence(
return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers))
+def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool:
+ return (
+ isinstance(deserializer, functools.partial)
+ and isinstance(deserializer.args[0], functools.partial)
+ and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable
+ )
+
+
def _deserialize_sequence(
deserializer: typing.Optional[typing.Callable],
module: typing.Optional[str],
@@ -767,6 +827,19 @@ def _deserialize_sequence(
return obj
if isinstance(obj, ET.Element):
obj = list(obj)
+
+ # encoded string may be deserialized to sequence
+ if isinstance(obj, str) and isinstance(deserializer, functools.partial):
+ # for list[str]
+ if _is_array_encoded_deserializer(deserializer):
+ return deserializer(obj)
+
+ # for list[Union[...]]
+ if isinstance(deserializer.args[0], list):
+ for sub_deserializer in deserializer.args[0]:
+ if _is_array_encoded_deserializer(sub_deserializer):
+ return sub_deserializer(obj)
+
return type(obj)(_deserialize(deserializer, entry, module) for entry in obj)
@@ -817,16 +890,16 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur
# is it optional?
try:
- if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore
+ if any(a is _NONE_TYPE for a in annotation.__args__): # pyright: ignore
if len(annotation.__args__) <= 2: # pyright: ignore
if_obj_deserializer = _get_deserialize_callable_from_annotation(
- next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore
+ next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf # pyright: ignore
)
return functools.partial(_deserialize_with_optional, if_obj_deserializer)
# the type is Optional[Union[...]], we need to remove the None type from the Union
annotation_copy = copy.copy(annotation)
- annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore
+ annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE] # pyright: ignore
return _get_deserialize_callable_from_annotation(annotation_copy, module, rf)
except AttributeError:
pass
@@ -910,16 +983,20 @@ def _deserialize_with_callable(
return float(value.text) if value.text else None
if deserializer is bool:
return value.text == "true" if value.text else None
+ if deserializer and deserializer in _DESERIALIZE_MAPPING.values():
+ return deserializer(value.text) if value.text else None
+ if deserializer and deserializer in _DESERIALIZE_MAPPING_WITHFORMAT.values():
+ return deserializer(value.text) if value.text else None
if deserializer is None:
return value
if deserializer in [int, float, bool]:
return deserializer(value)
if isinstance(deserializer, CaseInsensitiveEnumMeta):
try:
- return deserializer(value)
+ return deserializer(value.text if isinstance(value, ET.Element) else value)
except ValueError:
# for unknown value, return raw value
- return value
+ return value.text if isinstance(value, ET.Element) else value
if isinstance(deserializer, type) and issubclass(deserializer, Model):
return deserializer._deserialize(value, [])
return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value)
@@ -952,7 +1029,7 @@ def _failsafe_deserialize(
) -> typing.Any:
try:
return _deserialize(deserializer, response.json(), module, rf, format)
- except DeserializationError:
+ except Exception: # pylint: disable=broad-except
_LOGGER.warning(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -965,7 +1042,7 @@ def _failsafe_deserialize_xml(
) -> typing.Any:
try:
return _deserialize_xml(deserializer, response.text())
- except DeserializationError:
+ except Exception: # pylint: disable=broad-except
_LOGGER.warning(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -998,7 +1075,11 @@ def __init__(
@property
def _class_type(self) -> typing.Any:
- return getattr(self._type, "args", [None])[0]
+ result = getattr(self._type, "args", [None])[0]
+ # type may be wrapped by nested functools.partial so we need to check for that
+ if isinstance(result, functools.partial):
+ return getattr(result, "args", [None])[0]
+ return result
@property
def _rest_name(self) -> str:
@@ -1009,14 +1090,37 @@ def _rest_name(self) -> str:
def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin
# by this point, type and rest_name will have a value bc we default
# them in __new__ of the Model class
- item = obj.get(self._rest_name)
+ # Use _data.get() directly to avoid triggering __getitem__ which clears the cache
+ item = obj._data.get(self._rest_name)
if item is None:
return item
if self._is_model:
return item
- return _deserialize(self._type, _serialize(item, self._format), rf=self)
+
+ # For mutable types, we want mutations to directly affect _data
+ # Check if we've already deserialized this value
+ cache_attr = f"_deserialized_{self._rest_name}"
+ if hasattr(obj, cache_attr):
+ # Return the value from _data directly (it's been deserialized in place)
+ return obj._data.get(self._rest_name)
+
+ deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self)
+
+ # For mutable types, store the deserialized value back in _data
+ # so mutations directly affect _data
+ if isinstance(deserialized, (dict, list, set)):
+ obj._data[self._rest_name] = deserialized
+ object.__setattr__(obj, cache_attr, True) # Mark as deserialized
+ return deserialized
+
+ return deserialized
def __set__(self, obj: Model, value) -> None:
+ # Clear the cached deserialized object when setting a new value
+ cache_attr = f"_deserialized_{self._rest_name}"
+ if hasattr(obj, cache_attr):
+ object.__delattr__(obj, cache_attr)
+
if value is None:
# we want to wipe out entries if users set attr to None
try:
@@ -1184,7 +1288,7 @@ def _get_wrapped_element(
_get_element(v, exclude_readonly, meta, wrapped_element)
else:
wrapped_element.text = _get_primitive_type_value(v)
- return wrapped_element
+ return wrapped_element # type: ignore[no-any-return]
def _get_primitive_type_value(v) -> str:
@@ -1197,7 +1301,9 @@ def _get_primitive_type_value(v) -> str:
return str(v)
-def _create_xml_element(tag, prefix=None, ns=None):
+def _create_xml_element(
+ tag: typing.Any, prefix: typing.Optional[str] = None, ns: typing.Optional[str] = None
+) -> ET.Element:
if prefix and ns:
ET.register_namespace(prefix, ns)
if ns:
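The failsafe helpers in this file now swallow any exception instead of only DeserializationError, since parsing the error model must never mask the real HTTP failure. A minimal standalone sketch of that pattern:

import logging

_LOGGER = logging.getLogger(__name__)

def failsafe(parse, raw):
    try:
        return parse(raw)
    except Exception:  # broad by design, mirroring _failsafe_deserialize
        _LOGGER.warning("Ignoring deserialization error in failsafe path", exc_info=True)
        return None

assert failsafe(int, "42") == 42
assert failsafe(int, "not-a-number") is None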
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_utils/serialization.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_utils/serialization.py
index 45a3e44e45cb..81ec1de5922b 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_utils/serialization.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_utils/serialization.py
@@ -821,13 +821,20 @@ def serialize_basic(cls, data, data_type, **kwargs):
:param str data_type: Type of object in the iterable.
:rtype: str, int, float, bool
:return: serialized object
+        :raises TypeError: if data_type is not one of str, int, float, bool.
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec # pylint: disable=eval-used
+ if data_type == "int":
+ return int(data)
+ if data_type == "float":
+ return float(data)
+ if data_type == "bool":
+ return bool(data)
+ raise TypeError("Unknown basic data type: {}".format(data_type))
@classmethod
def serialize_unicode(cls, data):
@@ -1757,7 +1764,7 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return
:param str data_type: deserialization data type.
:return: Deserialized basic type.
:rtype: str, int, float or bool
- :raises TypeError: if string format is not valid.
+ :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool.
"""
# If we're here, data is supposed to be a basic type.
# If it's still an XML node, take the text
@@ -1783,7 +1790,11 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec # pylint: disable=eval-used
+ if data_type == "int":
+ return int(attr)
+ if data_type == "float":
+ return float(attr)
+ raise TypeError("Unknown basic data type: {}".format(data_type))
@staticmethod
def deserialize_unicode(data):
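A minimal sketch of the explicit dispatch that replaces the eval(data_type)(...) calls above; unknown type names now raise TypeError instead of being evaluated (bool handling happens earlier in deserialize_basic and is unchanged):

def deserialize_basic_value(attr, data_type):
    if data_type == "str":
        return str(attr)
    if data_type == "int":
        return int(attr)
    if data_type == "float":
        return float(attr)
    raise TypeError("Unknown basic data type: {}".format(data_type))

assert deserialize_basic_value("3", "int") == 3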
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_version.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_version.py
index 0ec13ea52bbf..ed0855dea5e6 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_version.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/_version.py
@@ -6,4 +6,4 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-VERSION = "1.0.0"
+VERSION = "1.1.0"
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/_client.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/_client.py
index 986e3241ffcd..c8a870228efa 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/_client.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/_client.py
@@ -46,8 +46,9 @@ class DurableTaskMgmtClient:
:keyword cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is
None.
:paramtype cloud_setting: ~azure.core.AzureClouds
- :keyword api_version: The API version to use for this operation. Default value is "2025-11-01".
- Note that overriding this default value may result in unsupported behavior.
+ :keyword api_version: The API version to use for this operation. Known values are "2026-02-01"
+ and None. Default value is "2026-02-01". Note that overriding this default value may result in
+ unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
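A hedged construction sketch for the async client with the new default API version; the credential and subscription id are placeholders, and the import paths follow the package layout shown in this diff:

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.durabletask.aio import DurableTaskMgmtClient

client = DurableTaskMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
    api_version="2026-02-01",  # the default; overriding it may result in unsupported behavior
)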
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/_configuration.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/_configuration.py
index 1180f99f67c9..5e2132b7f447 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/_configuration.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/_configuration.py
@@ -33,8 +33,9 @@ class DurableTaskMgmtClientConfiguration: # pylint: disable=too-many-instance-a
:param cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is
None.
:type cloud_setting: ~azure.core.AzureClouds
- :keyword api_version: The API version to use for this operation. Default value is "2025-11-01".
- Note that overriding this default value may result in unsupported behavior.
+ :keyword api_version: The API version to use for this operation. Known values are "2026-02-01"
+ and None. Default value is "2026-02-01". Note that overriding this default value may result in
+ unsupported behavior.
:paramtype api_version: str
"""
@@ -46,7 +47,7 @@ def __init__(
cloud_setting: Optional["AzureClouds"] = None,
**kwargs: Any
) -> None:
- api_version: str = kwargs.pop("api_version", "2025-11-01")
+ api_version: str = kwargs.pop("api_version", "2026-02-01")
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/_patch.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/_patch.py
index 8bcb627aa475..87676c65a8f0 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/_patch.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/_patch.py
@@ -7,9 +7,9 @@
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
-from typing import List
-__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+__all__: list[str] = [] # Add all objects you want publicly available to users at this package level
def patch_sdk():
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/operations/_operations.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/operations/_operations.py
index 740f0e8a2fa9..37fea430dd4a 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/operations/_operations.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/operations/_operations.py
@@ -44,11 +44,18 @@
build_retention_policies_get_request,
build_retention_policies_list_by_scheduler_request,
build_retention_policies_update_request,
+ build_schedulers_create_or_update_private_endpoint_connection_request,
build_schedulers_create_or_update_request,
+ build_schedulers_delete_private_endpoint_connection_request,
build_schedulers_delete_request,
+ build_schedulers_get_private_endpoint_connection_request,
+ build_schedulers_get_private_link_request,
build_schedulers_get_request,
build_schedulers_list_by_resource_group_request,
build_schedulers_list_by_subscription_request,
+ build_schedulers_list_private_endpoint_connections_request,
+ build_schedulers_list_private_links_request,
+ build_schedulers_update_private_endpoint_connection_request,
build_schedulers_update_request,
build_task_hubs_create_or_update_request,
build_task_hubs_delete_request,
@@ -140,7 +147,10 @@ def prepare_request(next_link=None):
async def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.Operation], deserialized.get("value", []))
+ list_of_elem = _deserialize(
+ List[_models.Operation],
+ deserialized.get("value", []),
+ )
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.get("nextLink") or None, AsyncList(list_of_elem)
@@ -156,7 +166,10 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
@@ -220,6 +233,1068 @@ async def get(self, resource_group_name: str, scheduler_name: str, **kwargs: Any
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = kwargs.pop("stream", False)
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ if _stream:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if _stream:
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
+ else:
+ deserialized = _deserialize(_models.Scheduler, response.json())
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ async def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ resource: Union[_models.Scheduler, JSON, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = kwargs.pop("params", {}) or {}
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _content = None
+ if isinstance(resource, (IOBase, bytes)):
+ _content = resource
+ else:
+ _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore
+
+ _request = build_schedulers_create_or_update_request(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ subscription_id=self._config.subscription_id,
+ content_type=content_type,
+ api_version=self._config.api_version,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 201:
+ response_headers["Azure-AsyncOperation"] = self._deserialize(
+ "str", response.headers.get("Azure-AsyncOperation")
+ )
+ response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
+
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ resource: _models.Scheduler,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.Scheduler]:
+ """Create or update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param resource: Resource create parameters. Required.
+ :type resource: ~azure.mgmt.durabletask.models.Scheduler
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ resource: JSON,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.Scheduler]:
+ """Create or update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param resource: Resource create parameters. Required.
+ :type resource: JSON
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ resource: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.Scheduler]:
+ """Create or update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param resource: Resource create parameters. Required.
+ :type resource: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ resource: Union[_models.Scheduler, JSON, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.Scheduler]:
+ """Create or update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param resource: Resource create parameters. Is one of the following types: Scheduler, JSON,
+ IO[bytes] Required.
+ :type resource: ~azure.mgmt.durabletask.models.Scheduler or JSON or IO[bytes]
+ :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = kwargs.pop("params", {}) or {}
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.Scheduler] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = await self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ resource=resource,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ await raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ response = pipeline_response.http_response
+ deserialized = _deserialize(_models.Scheduler, response.json())
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+
+ if polling is True:
+ polling_method: AsyncPollingMethod = cast(
+ AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ )
+ elif polling is False:
+ polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return AsyncLROPoller[_models.Scheduler].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return AsyncLROPoller[_models.Scheduler](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
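A hedged sketch of driving the new async create-or-update LRO; the schedulers attribute name follows the SDK's usual operation-group naming and is assumed here, and the resource body is a placeholder:

async def create_scheduler(client):
    # client: DurableTaskMgmtClient from azure.mgmt.durabletask.aio
    poller = await client.schedulers.begin_create_or_update(
        resource_group_name="my-rg",
        scheduler_name="my-scheduler",
        resource={"location": "northcentralus", "properties": {}},  # placeholder body
    )
    return await poller.result()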
+ async def _update_initial(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ properties: Union[_models.SchedulerUpdate, JSON, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = kwargs.pop("params", {}) or {}
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _content = None
+ if isinstance(properties, (IOBase, bytes)):
+ _content = properties
+ else:
+ _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore
+
+ _request = build_schedulers_update_request(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ subscription_id=self._config.subscription_id,
+ content_type=content_type,
+ api_version=self._config.api_version,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+ response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
+
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ properties: _models.SchedulerUpdate,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.Scheduler]:
+ """Update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param properties: The resource properties to be updated. Required.
+ :type properties: ~azure.mgmt.durabletask.models.SchedulerUpdate
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ properties: JSON,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.Scheduler]:
+ """Update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param properties: The resource properties to be updated. Required.
+ :type properties: JSON
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ properties: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.Scheduler]:
+ """Update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param properties: The resource properties to be updated. Required.
+ :type properties: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ properties: Union[_models.SchedulerUpdate, JSON, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.Scheduler]:
+ """Update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param properties: The resource properties to be updated. Is one of the following types:
+ SchedulerUpdate, JSON, IO[bytes] Required.
+ :type properties: ~azure.mgmt.durabletask.models.SchedulerUpdate or JSON or IO[bytes]
+ :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = kwargs.pop("params", {}) or {}
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.Scheduler] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = await self._update_initial(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ properties=properties,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ await raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ response = pipeline_response.http_response
+ deserialized = _deserialize(_models.Scheduler, response.json())
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+
+ if polling is True:
+ polling_method: AsyncPollingMethod = cast(
+ AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ )
+ elif polling is False:
+ polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return AsyncLROPoller[_models.Scheduler].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return AsyncLROPoller[_models.Scheduler](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ async def _delete_initial(
+ self, resource_group_name: str, scheduler_name: str, **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ _request = build_schedulers_delete_request(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ subscription_id=self._config.subscription_id,
+ api_version=self._config.api_version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+ response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
+
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @distributed_trace_async
+ async def begin_delete(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
+ """Delete a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :return: An instance of AsyncLROPoller that returns None
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ await raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
+ if cls:
+ return cls(pipeline_response, None, {}) # type: ignore
+
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+
+ if polling is True:
+ polling_method: AsyncPollingMethod = cast(
+ AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ )
+ elif polling is False:
+ polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return AsyncLROPoller[None].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
+
+ @distributed_trace
+ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncItemPaged["_models.Scheduler"]:
+ """List Schedulers by resource group.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :return: An iterator like instance of Scheduler
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[List[_models.Scheduler]] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_schedulers_list_by_resource_group_request(
+ resource_group_name=resource_group_name,
+ subscription_id=self._config.subscription_id,
+ api_version=self._config.api_version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url(
+ "self._config.base_url", self._config.base_url, "str", skip_quote=True
+ ),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url(
+ "self._config.base_url", self._config.base_url, "str", skip_quote=True
+ ),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ return _request
+
+ async def extract_data(pipeline_response):
+ deserialized = pipeline_response.http_response.json()
+ list_of_elem = _deserialize(
+ List[_models.Scheduler],
+ deserialized.get("value", []),
+ )
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.get("nextLink") or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(get_next, extract_data)
+
+ @distributed_trace
+ def list_by_subscription(self, **kwargs: Any) -> AsyncItemPaged["_models.Scheduler"]:
+ """List Schedulers by subscription.
+
+ :return: An iterator like instance of Scheduler
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[List[_models.Scheduler]] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_schedulers_list_by_subscription_request(
+ subscription_id=self._config.subscription_id,
+ api_version=self._config.api_version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url(
+ "self._config.base_url", self._config.base_url, "str", skip_quote=True
+ ),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url(
+ "self._config.base_url", self._config.base_url, "str", skip_quote=True
+ ),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ return _request
+
+ async def extract_data(pipeline_response):
+ deserialized = pipeline_response.http_response.json()
+ list_of_elem = _deserialize(
+ List[_models.Scheduler],
+ deserialized.get("value", []),
+ )
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.get("nextLink") or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(get_next, extract_data)
+
+ @distributed_trace_async
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_link_resource_name",
+ "accept",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ async def get_private_link(
+ self, resource_group_name: str, scheduler_name: str, private_link_resource_name: str, **kwargs: Any
+ ) -> _models.SchedulerPrivateLinkResource:
+ """Get a private link resource for the durable task scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param private_link_resource_name: The name of the private link associated with the Azure
+ resource. Required.
+ :type private_link_resource_name: str
+ :return: SchedulerPrivateLinkResource. The SchedulerPrivateLinkResource is compatible with
+ MutableMapping
+ :rtype: ~azure.mgmt.durabletask.models.SchedulerPrivateLinkResource
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[_models.SchedulerPrivateLinkResource] = kwargs.pop("cls", None)
+
+ _request = build_schedulers_get_private_link_request(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ private_link_resource_name=private_link_resource_name,
+ subscription_id=self._config.subscription_id,
+ api_version=self._config.api_version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = kwargs.pop("stream", False)
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ if _stream:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if _stream:
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
+ else:
+ deserialized = _deserialize(_models.SchedulerPrivateLinkResource, response.json())
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ @distributed_trace
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": ["api_version", "subscription_id", "resource_group_name", "scheduler_name", "accept"]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ def list_private_links(
+ self, resource_group_name: str, scheduler_name: str, **kwargs: Any
+ ) -> AsyncItemPaged["_models.SchedulerPrivateLinkResource"]:
+ """List private link resources for the durable task scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :return: An iterator like instance of SchedulerPrivateLinkResource
+ :rtype:
+ ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.durabletask.models.SchedulerPrivateLinkResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[List[_models.SchedulerPrivateLinkResource]] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_schedulers_list_private_links_request(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ subscription_id=self._config.subscription_id,
+ api_version=self._config.api_version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url(
+ "self._config.base_url", self._config.base_url, "str", skip_quote=True
+ ),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url(
+ "self._config.base_url", self._config.base_url, "str", skip_quote=True
+ ),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ return _request
+
+ async def extract_data(pipeline_response):
+ deserialized = pipeline_response.http_response.json()
+ list_of_elem = _deserialize(
+ List[_models.SchedulerPrivateLinkResource],
+ deserialized.get("value", []),
+ )
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.get("nextLink") or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(get_next, extract_data)
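Paging works the same way as the existing list operations; a sketch reusing the `client` from the sketch above:

async for private_link in client.schedulers.list_private_links(
    resource_group_name="my-rg", scheduler_name="my-scheduler"
):
    # Each item is a SchedulerPrivateLinkResource page element.
    print(private_link.name)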
+
+ @distributed_trace_async
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_endpoint_connection_name",
+ "accept",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ async def get_private_endpoint_connection(
+ self, resource_group_name: str, scheduler_name: str, private_endpoint_connection_name: str, **kwargs: Any
+ ) -> _models.PrivateEndpointConnection:
+ """Get a private endpoint connection for the durable task scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
+ :return: PrivateEndpointConnection. The PrivateEndpointConnection is compatible with
+ MutableMapping
+ :rtype: ~azure.mgmt.durabletask.models.PrivateEndpointConnection
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
+
+ _request = build_schedulers_get_private_endpoint_connection_request(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
+ subscription_id=self._config.subscription_id,
+ api_version=self._config.api_version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -234,24 +1309,43 @@ async def get(self, resource_group_name: str, scheduler_name: str, **kwargs: Any
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
- deserialized = _deserialize(_models.Scheduler, response.json())
+ deserialized = _deserialize(_models.PrivateEndpointConnection, response.json())
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
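A matching sketch for reading a single connection, again reusing `client` from the first sketch; the property path through `properties.private_link_service_connection_state` follows the ARM common types and is an assumption here:

connection = await client.schedulers.get_private_endpoint_connection(
    resource_group_name="my-rg",
    scheduler_name="my-scheduler",
    private_endpoint_connection_name="my-pe-connection",  # placeholder
)
state = connection.properties.private_link_service_connection_state
print(connection.name, state.status, state.description)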
- async def _create_or_update_initial(
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_endpoint_connection_name",
+ "content_type",
+ "accept",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ async def _create_or_update_private_endpoint_connection_initial( # pylint: disable=name-too-long
self,
resource_group_name: str,
scheduler_name: str,
- resource: Union[_models.Scheduler, JSON, IO[bytes]],
+ private_endpoint_connection_name: str,
+ resource: Union[_models.PrivateEndpointConnection, JSON, IO[bytes]],
**kwargs: Any
) -> AsyncIterator[bytes]:
error_map: MutableMapping = {
@@ -275,9 +1369,10 @@ async def _create_or_update_initial(
else:
_content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore
- _request = build_schedulers_create_or_update_request(
+ _request = build_schedulers_create_or_update_private_endpoint_connection_request(
resource_group_name=resource_group_name,
scheduler_name=scheduler_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
api_version=self._config.api_version,
@@ -290,6 +1385,7 @@ async def _create_or_update_initial(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -303,7 +1399,10 @@ async def _create_or_update_initial(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -313,7 +1412,7 @@ async def _create_or_update_initial(
)
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -321,124 +1420,160 @@ async def _create_or_update_initial(
return deserialized # type: ignore
@overload
- async def begin_create_or_update(
+ async def begin_create_or_update_private_endpoint_connection( # pylint: disable=name-too-long
self,
resource_group_name: str,
scheduler_name: str,
- resource: _models.Scheduler,
+ private_endpoint_connection_name: str,
+ resource: _models.PrivateEndpointConnection,
*,
content_type: str = "application/json",
**kwargs: Any
- ) -> AsyncLROPoller[_models.Scheduler]:
- """Create or update a Scheduler.
+ ) -> AsyncLROPoller[_models.PrivateEndpointConnection]:
+ """Create or update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:param resource: Resource create parameters. Required.
- :type resource: ~azure.mgmt.durabletask.models.Scheduler
+ :type resource: ~azure.mgmt.durabletask.models.PrivateEndpointConnection
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :return: An instance of AsyncLROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
- async def begin_create_or_update(
+ async def begin_create_or_update_private_endpoint_connection( # pylint: disable=name-too-long
self,
resource_group_name: str,
scheduler_name: str,
+ private_endpoint_connection_name: str,
resource: JSON,
*,
content_type: str = "application/json",
**kwargs: Any
- ) -> AsyncLROPoller[_models.Scheduler]:
- """Create or update a Scheduler.
+ ) -> AsyncLROPoller[_models.PrivateEndpointConnection]:
+ """Create or update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:param resource: Resource create parameters. Required.
:type resource: JSON
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :return: An instance of AsyncLROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
- async def begin_create_or_update(
+ async def begin_create_or_update_private_endpoint_connection( # pylint: disable=name-too-long
self,
resource_group_name: str,
scheduler_name: str,
+ private_endpoint_connection_name: str,
resource: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
- ) -> AsyncLROPoller[_models.Scheduler]:
- """Create or update a Scheduler.
+ ) -> AsyncLROPoller[_models.PrivateEndpointConnection]:
+ """Create or update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:param resource: Resource create parameters. Required.
:type resource: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :return: An instance of AsyncLROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
- async def begin_create_or_update(
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_endpoint_connection_name",
+ "content_type",
+ "accept",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ async def begin_create_or_update_private_endpoint_connection( # pylint: disable=name-too-long
self,
resource_group_name: str,
scheduler_name: str,
- resource: Union[_models.Scheduler, JSON, IO[bytes]],
+ private_endpoint_connection_name: str,
+ resource: Union[_models.PrivateEndpointConnection, JSON, IO[bytes]],
**kwargs: Any
- ) -> AsyncLROPoller[_models.Scheduler]:
- """Create or update a Scheduler.
+ ) -> AsyncLROPoller[_models.PrivateEndpointConnection]:
+ """Create or update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
- :param resource: Resource create parameters. Is one of the following types: Scheduler, JSON,
- IO[bytes] Required.
- :type resource: ~azure.mgmt.durabletask.models.Scheduler or JSON or IO[bytes]
- :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
+ :param resource: Resource create parameters. Is one of the following types:
+ PrivateEndpointConnection, JSON, IO[bytes] Required.
+ :type resource: ~azure.mgmt.durabletask.models.PrivateEndpointConnection or JSON or IO[bytes]
+ :return: An instance of AsyncLROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.Scheduler] = kwargs.pop("cls", None)
+ cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._create_or_update_initial(
+ raw_result = await self._create_or_update_private_endpoint_connection_initial(
resource_group_name=resource_group_name,
scheduler_name=scheduler_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
resource=resource,
content_type=content_type,
cls=lambda x, y, z: x,
@@ -451,7 +1586,7 @@ async def begin_create_or_update(
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
- deserialized = _deserialize(_models.Scheduler, response.json())
+ deserialized = _deserialize(_models.PrivateEndpointConnection, response.json())
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -469,21 +1604,37 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller[_models.Scheduler].from_continuation_token(
+ return AsyncLROPoller[_models.PrivateEndpointConnection].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller[_models.Scheduler](
+ return AsyncLROPoller[_models.PrivateEndpointConnection](
self._client, raw_result, get_long_running_output, polling_method # type: ignore
)
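A sketch of approving a pending connection through this long-running operation, reusing `client` from the first sketch. The model keyword arguments (`properties`, `status`, `description`) follow the ARM common-types shape and are assumptions:

from azure.mgmt.durabletask import models

poller = await client.schedulers.begin_create_or_update_private_endpoint_connection(
    resource_group_name="my-rg",
    scheduler_name="my-scheduler",
    private_endpoint_connection_name="my-pe-connection",
    resource=models.PrivateEndpointConnection(
        properties=models.PrivateEndpointConnectionProperties(
            private_link_service_connection_state=models.PrivateLinkServiceConnectionState(
                status="Approved",
                description="Approved by the scheduler owner",
            )
        )
    ),
)
connection = await poller.result()  # resolves to the updated PrivateEndpointConnection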
- async def _update_initial(
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_endpoint_connection_name",
+ "content_type",
+ "accept",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ async def _update_private_endpoint_connection_initial( # pylint: disable=name-too-long
self,
resource_group_name: str,
scheduler_name: str,
- properties: Union[_models.SchedulerUpdate, JSON, IO[bytes]],
+ private_endpoint_connection_name: str,
+ properties: Union[_models.PrivateEndpointConnectionUpdate, JSON, IO[bytes]],
**kwargs: Any
) -> AsyncIterator[bytes]:
error_map: MutableMapping = {
@@ -507,9 +1658,10 @@ async def _update_initial(
else:
_content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore
- _request = build_schedulers_update_request(
+ _request = build_schedulers_update_private_endpoint_connection_request(
resource_group_name=resource_group_name,
scheduler_name=scheduler_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
api_version=self._config.api_version,
@@ -522,6 +1674,7 @@ async def _update_initial(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -535,7 +1688,10 @@ async def _update_initial(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -543,7 +1699,7 @@ async def _update_initial(
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -551,124 +1707,161 @@ async def _update_initial(
return deserialized # type: ignore
@overload
- async def begin_update(
+ async def begin_update_private_endpoint_connection(
self,
resource_group_name: str,
scheduler_name: str,
- properties: _models.SchedulerUpdate,
+ private_endpoint_connection_name: str,
+ properties: _models.PrivateEndpointConnectionUpdate,
*,
content_type: str = "application/json",
**kwargs: Any
- ) -> AsyncLROPoller[_models.Scheduler]:
- """Update a Scheduler.
+ ) -> AsyncLROPoller[_models.PrivateEndpointConnection]:
+ """Update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:param properties: The resource properties to be updated. Required.
- :type properties: ~azure.mgmt.durabletask.models.SchedulerUpdate
+ :type properties: ~azure.mgmt.durabletask.models.PrivateEndpointConnectionUpdate
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :return: An instance of AsyncLROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
- async def begin_update(
+ async def begin_update_private_endpoint_connection(
self,
resource_group_name: str,
scheduler_name: str,
+ private_endpoint_connection_name: str,
properties: JSON,
*,
content_type: str = "application/json",
**kwargs: Any
- ) -> AsyncLROPoller[_models.Scheduler]:
- """Update a Scheduler.
+ ) -> AsyncLROPoller[_models.PrivateEndpointConnection]:
+ """Update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:param properties: The resource properties to be updated. Required.
:type properties: JSON
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :return: An instance of AsyncLROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
- async def begin_update(
+ async def begin_update_private_endpoint_connection(
self,
resource_group_name: str,
scheduler_name: str,
+ private_endpoint_connection_name: str,
properties: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
- ) -> AsyncLROPoller[_models.Scheduler]:
- """Update a Scheduler.
+ ) -> AsyncLROPoller[_models.PrivateEndpointConnection]:
+ """Update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:param properties: The resource properties to be updated. Required.
:type properties: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :return: An instance of AsyncLROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
- async def begin_update(
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_endpoint_connection_name",
+ "content_type",
+ "accept",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ async def begin_update_private_endpoint_connection(
self,
resource_group_name: str,
scheduler_name: str,
- properties: Union[_models.SchedulerUpdate, JSON, IO[bytes]],
+ private_endpoint_connection_name: str,
+ properties: Union[_models.PrivateEndpointConnectionUpdate, JSON, IO[bytes]],
**kwargs: Any
- ) -> AsyncLROPoller[_models.Scheduler]:
- """Update a Scheduler.
+ ) -> AsyncLROPoller[_models.PrivateEndpointConnection]:
+ """Update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:param properties: The resource properties to be updated. Is one of the following types:
- SchedulerUpdate, JSON, IO[bytes] Required.
- :type properties: ~azure.mgmt.durabletask.models.SchedulerUpdate or JSON or IO[bytes]
- :return: An instance of AsyncLROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ PrivateEndpointConnectionUpdate, JSON, IO[bytes] Required.
+ :type properties: ~azure.mgmt.durabletask.models.PrivateEndpointConnectionUpdate or JSON or
+ IO[bytes]
+ :return: An instance of AsyncLROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.Scheduler] = kwargs.pop("cls", None)
+ cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._update_initial(
+ raw_result = await self._update_private_endpoint_connection_initial(
resource_group_name=resource_group_name,
scheduler_name=scheduler_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
properties=properties,
content_type=content_type,
cls=lambda x, y, z: x,
@@ -681,7 +1874,7 @@ async def begin_update(
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
- deserialized = _deserialize(_models.Scheduler, response.json())
+ deserialized = _deserialize(_models.PrivateEndpointConnection, response.json())
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -699,18 +1892,31 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller[_models.Scheduler].from_continuation_token(
+ return AsyncLROPoller[_models.PrivateEndpointConnection].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller[_models.Scheduler](
+ return AsyncLROPoller[_models.PrivateEndpointConnection](
self._client, raw_result, get_long_running_output, polling_method # type: ignore
)
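Because the update overloads also accept a JSON mapping, the same call can be sketched without constructing models; the wire property names below are assumed from the ARM common types:

poller = await client.schedulers.begin_update_private_endpoint_connection(
    resource_group_name="my-rg",
    scheduler_name="my-scheduler",
    private_endpoint_connection_name="my-pe-connection",
    properties={
        "properties": {
            "privateLinkServiceConnectionState": {
                "status": "Rejected",
                "description": "Connection no longer needed",
            }
        }
    },
)
connection = await poller.result()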
- async def _delete_initial(
- self, resource_group_name: str, scheduler_name: str, **kwargs: Any
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_endpoint_connection_name",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ async def _delete_private_endpoint_connection_initial( # pylint: disable=name-too-long
+ self, resource_group_name: str, scheduler_name: str, private_endpoint_connection_name: str, **kwargs: Any
) -> AsyncIterator[bytes]:
error_map: MutableMapping = {
401: ClientAuthenticationError,
@@ -725,9 +1931,10 @@ async def _delete_initial(
cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- _request = build_schedulers_delete_request(
+ _request = build_schedulers_delete_private_endpoint_connection_request(
resource_group_name=resource_group_name,
scheduler_name=scheduler_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=self._config.api_version,
headers=_headers,
@@ -738,6 +1945,7 @@ async def _delete_initial(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -751,7 +1959,10 @@ async def _delete_initial(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -759,7 +1970,7 @@ async def _delete_initial(
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -767,14 +1978,32 @@ async def _delete_initial(
return deserialized # type: ignore
@distributed_trace_async
- async def begin_delete(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
- """Delete a Scheduler.
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_endpoint_connection_name",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ async def begin_delete_private_endpoint_connection(
+ self, resource_group_name: str, scheduler_name: str, private_endpoint_connection_name: str, **kwargs: Any
+ ) -> AsyncLROPoller[None]:
+ """Delete a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:return: An instance of AsyncLROPoller that returns None
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -787,9 +2016,10 @@ async def begin_delete(self, resource_group_name: str, scheduler_name: str, **kw
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial(
+ raw_result = await self._delete_private_endpoint_connection_initial(
resource_group_name=resource_group_name,
scheduler_name=scheduler_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
@@ -824,20 +2054,32 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
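Deletion follows the usual LRO pattern and resolves to None; a sketch reusing `client`:

poller = await client.schedulers.begin_delete_private_endpoint_connection(
    resource_group_name="my-rg",
    scheduler_name="my-scheduler",
    private_endpoint_connection_name="my-pe-connection",
)
await poller.result()  # completes once the connection has been removed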
@distributed_trace
- def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncItemPaged["_models.Scheduler"]:
- """List Schedulers by resource group.
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": ["api_version", "subscription_id", "resource_group_name", "scheduler_name", "accept"]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ def list_private_endpoint_connections(
+ self, resource_group_name: str, scheduler_name: str, **kwargs: Any
+ ) -> AsyncItemPaged["_models.PrivateEndpointConnection"]:
+ """List private endpoint connections for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :return: An iterator like instance of Scheduler
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.durabletask.models.Scheduler]
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :return: An iterator like instance of PrivateEndpointConnection
+ :rtype:
+ ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
- cls: ClsType[List[_models.Scheduler]] = kwargs.pop("cls", None)
+ cls: ClsType[List[_models.PrivateEndpointConnection]] = kwargs.pop("cls", None)
error_map: MutableMapping = {
401: ClientAuthenticationError,
@@ -850,8 +2092,9 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy
def prepare_request(next_link=None):
if not next_link:
- _request = build_schedulers_list_by_resource_group_request(
+ _request = build_schedulers_list_private_endpoint_connections_request(
resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
subscription_id=self._config.subscription_id,
api_version=self._config.api_version,
headers=_headers,
@@ -888,91 +2131,10 @@ def prepare_request(next_link=None):
async def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.Scheduler], deserialized.get("value", []))
- if cls:
- list_of_elem = cls(list_of_elem) # type: ignore
- return deserialized.get("nextLink") or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- _request = prepare_request(next_link)
-
- _stream = False
- pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- _request, stream=_stream, **kwargs
+ list_of_elem = _deserialize(
+ List[_models.PrivateEndpointConnection],
+ deserialized.get("value", []),
)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
- raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(get_next, extract_data)
-
- @distributed_trace
- def list_by_subscription(self, **kwargs: Any) -> AsyncItemPaged["_models.Scheduler"]:
- """List Schedulers by subscription.
-
- :return: An iterator like instance of Scheduler
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.durabletask.models.Scheduler]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- _headers = kwargs.pop("headers", {}) or {}
- _params = kwargs.pop("params", {}) or {}
-
- cls: ClsType[List[_models.Scheduler]] = kwargs.pop("cls", None)
-
- error_map: MutableMapping = {
- 401: ClientAuthenticationError,
- 404: ResourceNotFoundError,
- 409: ResourceExistsError,
- 304: ResourceNotModifiedError,
- }
- error_map.update(kwargs.pop("error_map", {}) or {})
-
- def prepare_request(next_link=None):
- if not next_link:
-
- _request = build_schedulers_list_by_subscription_request(
- subscription_id=self._config.subscription_id,
- api_version=self._config.api_version,
- headers=_headers,
- params=_params,
- )
- path_format_arguments = {
- "endpoint": self._serialize.url(
- "self._config.base_url", self._config.base_url, "str", skip_quote=True
- ),
- }
- _request.url = self._client.format_url(_request.url, **path_format_arguments)
-
- else:
- # make call to next link with the client's api-version
- _parsed_next_link = urllib.parse.urlparse(next_link)
- _next_request_params = case_insensitive_dict(
- {
- key: [urllib.parse.quote(v) for v in value]
- for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
- }
- )
- _next_request_params["api-version"] = self._config.api_version
- _request = HttpRequest(
- "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
- )
- path_format_arguments = {
- "endpoint": self._serialize.url(
- "self._config.base_url", self._config.base_url, "str", skip_quote=True
- ),
- }
- _request.url = self._client.format_url(_request.url, **path_format_arguments)
-
- return _request
-
- async def extract_data(pipeline_response):
- deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.Scheduler], deserialized.get("value", []))
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.get("nextLink") or None, AsyncList(list_of_elem)
@@ -988,7 +2150,10 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
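And the corresponding paging sketch for connections, reusing `client`:

async for pe_connection in client.schedulers.list_private_endpoint_connections(
    resource_group_name="my-rg", scheduler_name="my-scheduler"
):
    print(pe_connection.name)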
@@ -1057,6 +2222,7 @@ async def get(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1071,11 +2237,14 @@ async def get(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.TaskHub, response.json())
@@ -1129,6 +2298,7 @@ async def _create_or_update_initial(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1142,7 +2312,10 @@ async def _create_or_update_initial(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -1152,7 +2325,7 @@ async def _create_or_update_initial(
)
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -1361,6 +2534,7 @@ async def _delete_initial(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1374,7 +2548,10 @@ async def _delete_initial(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -1382,7 +2559,7 @@ async def _delete_initial(
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -1521,7 +2698,10 @@ def prepare_request(next_link=None):
async def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.TaskHub], deserialized.get("value", []))
+ list_of_elem = _deserialize(
+ List[_models.TaskHub],
+ deserialized.get("value", []),
+ )
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.get("nextLink") or None, AsyncList(list_of_elem)
@@ -1537,7 +2717,10 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
@@ -1568,7 +2751,7 @@ def __init__(self, *args, **kwargs) -> None:
params_added_on={
"2025-04-01-preview": ["api_version", "subscription_id", "resource_group_name", "scheduler_name", "accept"]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
async def get(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> _models.RetentionPolicy:
"""Get a Retention Policy.
@@ -1608,6 +2791,7 @@ async def get(self, resource_group_name: str, scheduler_name: str, **kwargs: Any
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1622,11 +2806,14 @@ async def get(self, resource_group_name: str, scheduler_name: str, **kwargs: Any
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.RetentionPolicy, response.json())
@@ -1647,7 +2834,7 @@ async def get(self, resource_group_name: str, scheduler_name: str, **kwargs: Any
"accept",
]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
async def _create_or_replace_initial(
self,
@@ -1692,6 +2879,7 @@ async def _create_or_replace_initial(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1705,7 +2893,10 @@ async def _create_or_replace_initial(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -1715,7 +2906,7 @@ async def _create_or_replace_initial(
)
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -1819,7 +3010,7 @@ async def begin_create_or_replace(
"accept",
]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
async def begin_create_or_replace(
self,
@@ -1907,7 +3098,7 @@ def get_long_running_output(pipeline_response):
"accept",
]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
async def _update_initial(
self,
@@ -1952,6 +3143,7 @@ async def _update_initial(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1965,7 +3157,10 @@ async def _update_initial(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -1973,7 +3168,7 @@ async def _update_initial(
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -2077,7 +3272,7 @@ async def begin_update(
"accept",
]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
async def begin_update(
self,
@@ -2158,7 +3353,7 @@ def get_long_running_output(pipeline_response):
params_added_on={
"2025-04-01-preview": ["api_version", "subscription_id", "resource_group_name", "scheduler_name"]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
async def _delete_initial(
self, resource_group_name: str, scheduler_name: str, **kwargs: Any
@@ -2189,6 +3384,7 @@ async def _delete_initial(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -2202,7 +3398,10 @@ async def _delete_initial(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -2210,7 +3409,7 @@ async def _delete_initial(
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -2223,7 +3422,7 @@ async def _delete_initial(
params_added_on={
"2025-04-01-preview": ["api_version", "subscription_id", "resource_group_name", "scheduler_name"]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
async def begin_delete(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
"""Delete a Retention Policy.
@@ -2287,7 +3486,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
params_added_on={
"2025-04-01-preview": ["api_version", "subscription_id", "resource_group_name", "scheduler_name", "accept"]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
def list_by_scheduler(
self, resource_group_name: str, scheduler_name: str, **kwargs: Any
@@ -2358,7 +3557,10 @@ def prepare_request(next_link=None):
async def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.RetentionPolicy], deserialized.get("value", []))
+ list_of_elem = _deserialize(
+ List[_models.RetentionPolicy],
+ deserialized.get("value", []),
+ )
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.get("nextLink") or None, AsyncList(list_of_elem)
@@ -2374,7 +3576,10 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
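The retention-policy operations above only gain the new `2026-02-01` entry in their `api_versions_list`; their call shape is unchanged. A short paging sketch, with the `retention_policies` attribute name assumed:

async for policy in client.retention_policies.list_by_scheduler(
    resource_group_name="my-rg", scheduler_name="my-scheduler"
):
    print(policy.name)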
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/operations/_patch.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/operations/_patch.py
index 8bcb627aa475..87676c65a8f0 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/operations/_patch.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/aio/operations/_patch.py
@@ -7,9 +7,9 @@
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
-from typing import List
-__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+__all__: list[str] = [] # Add all objects you want publicly available to users at this package level
def patch_sdk():
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/__init__.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/__init__.py
index 556262304d60..5dc55ea84dcc 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/__init__.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/__init__.py
@@ -19,12 +19,20 @@
ErrorResponse,
Operation,
OperationDisplay,
+ OptionalPropertiesUpdateableProperties,
+ PrivateEndpoint,
+ PrivateEndpointConnection,
+ PrivateEndpointConnectionProperties,
+ PrivateEndpointConnectionUpdate,
+ PrivateLinkResourceProperties,
+ PrivateLinkServiceConnectionState,
ProxyResource,
Resource,
RetentionPolicy,
RetentionPolicyDetails,
RetentionPolicyProperties,
Scheduler,
+ SchedulerPrivateLinkResource,
SchedulerProperties,
SchedulerPropertiesUpdate,
SchedulerSku,
@@ -40,7 +48,10 @@
ActionType,
CreatedByType,
Origin,
+ PrivateEndpointConnectionProvisioningState,
+ PrivateEndpointServiceConnectionStatus,
ProvisioningState,
+ PublicNetworkAccess,
PurgeableOrchestrationState,
RedundancyState,
SchedulerSkuName,
@@ -55,12 +66,20 @@
"ErrorResponse",
"Operation",
"OperationDisplay",
+ "OptionalPropertiesUpdateableProperties",
+ "PrivateEndpoint",
+ "PrivateEndpointConnection",
+ "PrivateEndpointConnectionProperties",
+ "PrivateEndpointConnectionUpdate",
+ "PrivateLinkResourceProperties",
+ "PrivateLinkServiceConnectionState",
"ProxyResource",
"Resource",
"RetentionPolicy",
"RetentionPolicyDetails",
"RetentionPolicyProperties",
"Scheduler",
+ "SchedulerPrivateLinkResource",
"SchedulerProperties",
"SchedulerPropertiesUpdate",
"SchedulerSku",
@@ -73,7 +92,10 @@
"ActionType",
"CreatedByType",
"Origin",
+ "PrivateEndpointConnectionProvisioningState",
+ "PrivateEndpointServiceConnectionStatus",
"ProvisioningState",
+ "PublicNetworkAccess",
"PurgeableOrchestrationState",
"RedundancyState",
"SchedulerSkuName",
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/_enums.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/_enums.py
index 898a1f9f80c1..7ef293ceac02 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/_enums.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/_enums.py
@@ -45,6 +45,32 @@ class Origin(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Indicates the operation is initiated by a user or system."""
+class PrivateEndpointConnectionProvisioningState( # pylint: disable=name-too-long
+ str, Enum, metaclass=CaseInsensitiveEnumMeta
+):
+ """The current provisioning state."""
+
+ SUCCEEDED = "Succeeded"
+ """Connection has been provisioned."""
+ CREATING = "Creating"
+ """Connection is being created."""
+ DELETING = "Deleting"
+ """Connection is being deleted."""
+ FAILED = "Failed"
+ """Connection provisioning has failed."""
+
+
+class PrivateEndpointServiceConnectionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """The private endpoint connection status."""
+
+ PENDING = "Pending"
+ """Connection waiting for approval or rejection."""
+ APPROVED = "Approved"
+ """Connection approved."""
+ REJECTED = "Rejected"
+ """Connection Rejected."""
+
+
class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The status of the current operation."""
@@ -55,41 +81,50 @@ class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
CANCELED = "Canceled"
"""Resource creation was canceled."""
PROVISIONING = "Provisioning"
- """The resource is being provisioned"""
+ """The resource is being provisioned."""
UPDATING = "Updating"
- """The resource is updating"""
+ """The resource is updating."""
DELETING = "Deleting"
- """The resource is being deleted"""
+ """The resource is being deleted."""
ACCEPTED = "Accepted"
- """The resource create request has been accepted"""
+ """The resource create request has been accepted."""
+
+
+class PublicNetworkAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """State of the public network access."""
+
+ ENABLED = "Enabled"
+ """The public network access is enabled."""
+ DISABLED = "Disabled"
+ """The public network access is disabled."""
class PurgeableOrchestrationState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Purgeable orchestration state to be used in retention policies."""
COMPLETED = "Completed"
- """The orchestration is completed"""
+ """The orchestration is completed."""
FAILED = "Failed"
- """The orchestration is failed"""
+ """The orchestration is failed."""
TERMINATED = "Terminated"
- """The orchestration is terminated"""
+ """The orchestration is terminated."""
CANCELED = "Canceled"
- """The orchestration is canceled"""
+ """The orchestration is canceled."""
class RedundancyState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The state of the resource redundancy."""
NONE = "None"
- """The resource is not redundant"""
+ """The resource is not redundant."""
ZONE = "Zone"
- """The resource is zone redundant"""
+ """The resource is zone redundant."""
class SchedulerSkuName(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The name of the Stock Keeping Unit (SKU) of a Durable Task Scheduler."""
DEDICATED = "Dedicated"
- """Dedicated SKU"""
+ """Dedicated SKU."""
CONSUMPTION = "Consumption"
- """Consumption SKU"""
+ """Consumption SKU."""
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/_models.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/_models.py
index 41cdfbdb157f..bfa5939e6772 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/_models.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/_models.py
@@ -1,4 +1,4 @@
-# pylint: disable=line-too-long,useless-suppression
+# pylint: disable=line-too-long,useless-suppression,too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -147,7 +147,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
class OperationDisplay(_Model):
- """Localized display information for and operation.
+ """Localized display information for an operation.
:ivar provider: The localized friendly form of the resource provider name, e.g. "Microsoft
Monitoring Insights" or "Microsoft Compute".
@@ -177,6 +177,57 @@ class OperationDisplay(_Model):
views."""
+class OptionalPropertiesUpdateableProperties(_Model):
+ """The template for adding optional properties.
+
+ :ivar private_endpoint: The private endpoint resource.
+ :vartype private_endpoint: ~azure.mgmt.durabletask.models.PrivateEndpoint
+ :ivar private_link_service_connection_state: A collection of information about the state of the
+ connection between service consumer and provider.
+ :vartype private_link_service_connection_state:
+ ~azure.mgmt.durabletask.models.PrivateLinkServiceConnectionState
+ """
+
+ private_endpoint: Optional["_models.PrivateEndpoint"] = rest_field(
+ name="privateEndpoint", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The private endpoint resource."""
+ private_link_service_connection_state: Optional["_models.PrivateLinkServiceConnectionState"] = rest_field(
+ name="privateLinkServiceConnectionState", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """A collection of information about the state of the connection between service consumer and
+ provider."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ private_endpoint: Optional["_models.PrivateEndpoint"] = None,
+ private_link_service_connection_state: Optional["_models.PrivateLinkServiceConnectionState"] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class PrivateEndpoint(_Model):
+ """The private endpoint resource.
+
+ :ivar id: The resource identifier of the private endpoint.
+ :vartype id: str
+ """
+
+ id: Optional[str] = rest_field(visibility=["read"])
+ """The resource identifier of the private endpoint."""
+
+
class Resource(_Model):
"""Resource.
@@ -205,6 +256,214 @@ class Resource(_Model):
"""Azure Resource Manager metadata containing createdBy and modifiedBy information."""
+class PrivateEndpointConnection(Resource):
+ """A private endpoint connection resource.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
+ information.
+ :vartype system_data: ~azure.mgmt.durabletask.models.SystemData
+ :ivar properties: The private endpoint connection properties.
+ :vartype properties: ~azure.mgmt.durabletask.models.PrivateEndpointConnectionProperties
+ """
+
+ properties: Optional["_models.PrivateEndpointConnectionProperties"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The private endpoint connection properties."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ properties: Optional["_models.PrivateEndpointConnectionProperties"] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class PrivateEndpointConnectionProperties(_Model):
+ """Properties of the private endpoint connection.
+
+ :ivar group_ids: The group ids for the private endpoint resource.
+ :vartype group_ids: list[str]
+ :ivar private_endpoint: The private endpoint resource.
+ :vartype private_endpoint: ~azure.mgmt.durabletask.models.PrivateEndpoint
+ :ivar private_link_service_connection_state: A collection of information about the state of the
+ connection between service consumer and provider. Required.
+ :vartype private_link_service_connection_state:
+ ~azure.mgmt.durabletask.models.PrivateLinkServiceConnectionState
+ :ivar provisioning_state: The provisioning state of the private endpoint connection resource.
+ Known values are: "Succeeded", "Creating", "Deleting", and "Failed".
+ :vartype provisioning_state: str or
+ ~azure.mgmt.durabletask.models.PrivateEndpointConnectionProvisioningState
+ """
+
+ group_ids: Optional[list[str]] = rest_field(name="groupIds", visibility=["read"])
+ """The group ids for the private endpoint resource."""
+ private_endpoint: Optional["_models.PrivateEndpoint"] = rest_field(
+ name="privateEndpoint", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The private endpoint resource."""
+ private_link_service_connection_state: "_models.PrivateLinkServiceConnectionState" = rest_field(
+ name="privateLinkServiceConnectionState", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """A collection of information about the state of the connection between service consumer and
+ provider. Required."""
+ provisioning_state: Optional[Union[str, "_models.PrivateEndpointConnectionProvisioningState"]] = rest_field(
+ name="provisioningState", visibility=["read"]
+ )
+ """The provisioning state of the private endpoint connection resource. Known values are:
+ \"Succeeded\", \"Creating\", \"Deleting\", and \"Failed\"."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ private_link_service_connection_state: "_models.PrivateLinkServiceConnectionState",
+ private_endpoint: Optional["_models.PrivateEndpoint"] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class PrivateEndpointConnectionUpdate(_Model):
+ """PATCH model for private endpoint connections.
+
+ :ivar properties: The private endpoint connection properties.
+ :vartype properties: ~azure.mgmt.durabletask.models.OptionalPropertiesUpdateableProperties
+ """
+
+ properties: Optional["_models.OptionalPropertiesUpdateableProperties"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The private endpoint connection properties."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ properties: Optional["_models.OptionalPropertiesUpdateableProperties"] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class PrivateLinkResourceProperties(_Model):
+ """Properties of a private link resource.
+
+ :ivar group_id: The private link resource group id.
+ :vartype group_id: str
+ :ivar required_members: The private link resource required member names.
+ :vartype required_members: list[str]
+ :ivar required_zone_names: The private link resource private link DNS zone name.
+ :vartype required_zone_names: list[str]
+ """
+
+ group_id: Optional[str] = rest_field(name="groupId", visibility=["read"])
+ """The private link resource group id."""
+ required_members: Optional[list[str]] = rest_field(name="requiredMembers", visibility=["read"])
+ """The private link resource required member names."""
+ required_zone_names: Optional[list[str]] = rest_field(
+ name="requiredZoneNames", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The private link resource private link DNS zone name."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ required_zone_names: Optional[list[str]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class PrivateLinkServiceConnectionState(_Model):
+ """A collection of information about the state of the connection between service consumer and
+ provider.
+
+ :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Known values are: "Pending", "Approved", and "Rejected".
+ :vartype status: str or ~azure.mgmt.durabletask.models.PrivateEndpointServiceConnectionStatus
+ :ivar description: The reason for approval/rejection of the connection.
+ :vartype description: str
+ :ivar actions_required: A message indicating if changes on the service provider require any
+ updates on the consumer.
+ :vartype actions_required: str
+ """
+
+ status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Indicates whether the connection has been Approved/Rejected/Removed by the owner of the
+ service. Known values are: \"Pending\", \"Approved\", and \"Rejected\"."""
+ description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The reason for approval/rejection of the connection."""
+ actions_required: Optional[str] = rest_field(
+ name="actionsRequired", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """A message indicating if changes on the service provider require any updates on the consumer."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = None,
+ description: Optional[str] = None,
+ actions_required: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
class ProxyResource(Resource):
"""Proxy Resource.
@@ -435,6 +694,47 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
+class SchedulerPrivateLinkResource(Resource):
+ """A private link resource.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
+ information.
+ :vartype system_data: ~azure.mgmt.durabletask.models.SystemData
+ :ivar properties: Resource properties.
+ :vartype properties: ~azure.mgmt.durabletask.models.PrivateLinkResourceProperties
+ """
+
+ properties: Optional["_models.PrivateLinkResourceProperties"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Resource properties."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ properties: Optional["_models.PrivateLinkResourceProperties"] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
class SchedulerProperties(_Model):
"""Details of the Scheduler.
@@ -448,6 +748,12 @@ class SchedulerProperties(_Model):
:vartype ip_allowlist: list[str]
:ivar sku: SKU of the durable task scheduler. Required.
:vartype sku: ~azure.mgmt.durabletask.models.SchedulerSku
+ :ivar public_network_access: Allow or disallow public network access to durable task scheduler.
+ Known values are: "Enabled" and "Disabled".
+ :vartype public_network_access: str or ~azure.mgmt.durabletask.models.PublicNetworkAccess
+ :ivar private_endpoint_connections: The private endpoints exposed by this resource.
+ :vartype private_endpoint_connections:
+ list[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
"""
provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field(
@@ -461,6 +767,15 @@ class SchedulerProperties(_Model):
"""IP allow list for durable task scheduler. Values can be IPv4, IPv6 or CIDR. Required."""
sku: "_models.SchedulerSku" = rest_field(visibility=["read", "create", "update", "delete", "query"])
"""SKU of the durable task scheduler. Required."""
+ public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = rest_field(
+ name="publicNetworkAccess", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Allow or disallow public network access to durable task scheduler. Known values are:
+ \"Enabled\" and \"Disabled\"."""
+ private_endpoint_connections: Optional[list["_models.PrivateEndpointConnection"]] = rest_field(
+ name="privateEndpointConnections", visibility=["read"]
+ )
+ """The private endpoints exposed by this resource."""
@overload
def __init__(
@@ -468,6 +783,7 @@ def __init__(
*,
ip_allowlist: list[str],
sku: "_models.SchedulerSku",
+ public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None,
) -> None: ...
@overload
@@ -493,6 +809,9 @@ class SchedulerPropertiesUpdate(_Model):
:vartype ip_allowlist: list[str]
:ivar sku: SKU of the durable task scheduler.
:vartype sku: ~azure.mgmt.durabletask.models.SchedulerSkuUpdate
+ :ivar public_network_access: Allow or disallow public network access to durable task scheduler.
+ Known values are: "Enabled" and "Disabled".
+ :vartype public_network_access: str or ~azure.mgmt.durabletask.models.PublicNetworkAccess
"""
provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field(
@@ -508,6 +827,11 @@ class SchedulerPropertiesUpdate(_Model):
"""IP allow list for durable task scheduler. Values can be IPv4, IPv6 or CIDR."""
sku: Optional["_models.SchedulerSkuUpdate"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
"""SKU of the durable task scheduler."""
+ public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = rest_field(
+ name="publicNetworkAccess", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Allow or disallow public network access to durable task scheduler. Known values are:
+ \"Enabled\" and \"Disabled\"."""
@overload
def __init__(
@@ -515,6 +839,7 @@ def __init__(
*,
ip_allowlist: Optional[list[str]] = None,
sku: Optional["_models.SchedulerSkuUpdate"] = None,
+ public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None,
) -> None: ...
@overload
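
The keyword-only overloads shown above define how the new models are assembled. Below is a minimal, hedged sketch of building an approval payload and a scheduler update body; every literal value is illustrative, and the mapping form simply mirrors the REST field names (`privateLinkServiceConnectionState`, `publicNetworkAccess`) declared in the `rest_field` calls:

```python
from azure.mgmt.durabletask.models import (
    PrivateEndpointConnection,
    PrivateEndpointConnectionProperties,
    PrivateEndpointServiceConnectionStatus,
    PrivateLinkServiceConnectionState,
    PublicNetworkAccess,
    SchedulerPropertiesUpdate,
)

# Keyword form: approve a pending private endpoint connection.
connection = PrivateEndpointConnection(
    properties=PrivateEndpointConnectionProperties(
        private_link_service_connection_state=PrivateLinkServiceConnectionState(
            status=PrivateEndpointServiceConnectionStatus.APPROVED,
            description="Approved by the network admin.",  # illustrative value
        ),
    ),
)

# Mapping form: the same payload expressed with the raw JSON field names.
connection_from_json = PrivateEndpointConnection(
    {
        "properties": {
            "privateLinkServiceConnectionState": {
                "status": "Approved",
                "description": "Approved by the network admin.",
            }
        }
    }
)

# Turning off public network access through the update-properties model.
update_properties = SchedulerPropertiesUpdate(
    public_network_access=PublicNetworkAccess.DISABLED,
)
```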
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/_patch.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/_patch.py
index 8bcb627aa475..87676c65a8f0 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/_patch.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/models/_patch.py
@@ -7,9 +7,9 @@
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
-from typing import List
-__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+__all__: list[str] = [] # Add all objects you want publicly available to users at this package level
def patch_sdk():
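
The operations module diffed below re-bases every request builder on api-version 2026-02-01 and adds builders plus long-running operations for the private-networking surface. As a hedged sketch of the LRO and paging patterns those `begin_*` and `list_*` methods follow: the `schedulers` operations object is assumed to come from an already-constructed management client (not part of this diff), resource names are illustrative, and the JSON body relies on the usual ARM envelope that nests the `publicNetworkAccess` field under `properties`:

```python
# Sketch only: driving one of the long-running scheduler operations shown below.
# `schedulers` is assumed to be the schedulers operation group of a management
# client created elsewhere.
poller = schedulers.begin_update(
    resource_group_name="example-rg",
    scheduler_name="example-scheduler",
    properties={"properties": {"publicNetworkAccess": "Disabled"}},
)
scheduler = poller.result()  # LROPoller[Scheduler] -> Scheduler

# Paged operations return ItemPaged and can be iterated directly.
for s in schedulers.list_by_resource_group(resource_group_name="example-rg"):
    print(s.name)
```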
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/operations/_operations.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/operations/_operations.py
index 2d6ac5ab1ec9..18abe743dff9 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/operations/_operations.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/operations/_operations.py
@@ -51,7 +51,7 @@ def build_operations_list_request(**kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -72,7 +72,7 @@ def build_schedulers_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -101,7 +101,7 @@ def build_schedulers_create_or_update_request( # pylint: disable=name-too-long
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -132,7 +132,7 @@ def build_schedulers_update_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -161,7 +161,7 @@ def build_schedulers_delete_request(
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DurableTask/schedulers/{schedulerName}"
path_format_arguments = {
@@ -184,7 +184,7 @@ def build_schedulers_list_by_resource_group_request( # pylint: disable=name-too
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -213,7 +213,7 @@ def build_schedulers_list_by_subscription_request( # pylint: disable=name-too-l
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -233,13 +233,238 @@ def build_schedulers_list_by_subscription_request( # pylint: disable=name-too-l
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+def build_schedulers_get_private_link_request( # pylint: disable=name-too-long
+ resource_group_name: str, scheduler_name: str, private_link_resource_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DurableTask/schedulers/{schedulerName}/privateLinkResources/{privateLinkResourceName}"
+ path_format_arguments = {
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "schedulerName": _SERIALIZER.url("scheduler_name", scheduler_name, "str"),
+ "privateLinkResourceName": _SERIALIZER.url("private_link_resource_name", private_link_resource_name, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_schedulers_list_private_links_request( # pylint: disable=name-too-long
+ resource_group_name: str, scheduler_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DurableTask/schedulers/{schedulerName}/privateLinkResources"
+ path_format_arguments = {
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "schedulerName": _SERIALIZER.url("scheduler_name", scheduler_name, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_schedulers_get_private_endpoint_connection_request( # pylint: disable=name-too-long
+ resource_group_name: str,
+ scheduler_name: str,
+ private_endpoint_connection_name: str,
+ subscription_id: str,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DurableTask/schedulers/{schedulerName}/privateEndpointConnections/{privateEndpointConnectionName}"
+ path_format_arguments = {
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "schedulerName": _SERIALIZER.url("scheduler_name", scheduler_name, "str"),
+ "privateEndpointConnectionName": _SERIALIZER.url(
+ "private_endpoint_connection_name", private_endpoint_connection_name, "str"
+ ),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_schedulers_create_or_update_private_endpoint_connection_request( # pylint: disable=name-too-long
+ resource_group_name: str,
+ scheduler_name: str,
+ private_endpoint_connection_name: str,
+ subscription_id: str,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DurableTask/schedulers/{schedulerName}/privateEndpointConnections/{privateEndpointConnectionName}"
+ path_format_arguments = {
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "schedulerName": _SERIALIZER.url("scheduler_name", scheduler_name, "str"),
+ "privateEndpointConnectionName": _SERIALIZER.url(
+ "private_endpoint_connection_name", private_endpoint_connection_name, "str"
+ ),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ if content_type is not None:
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_schedulers_update_private_endpoint_connection_request( # pylint: disable=name-too-long
+ resource_group_name: str,
+ scheduler_name: str,
+ private_endpoint_connection_name: str,
+ subscription_id: str,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DurableTask/schedulers/{schedulerName}/privateEndpointConnections/{privateEndpointConnectionName}"
+ path_format_arguments = {
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "schedulerName": _SERIALIZER.url("scheduler_name", scheduler_name, "str"),
+ "privateEndpointConnectionName": _SERIALIZER.url(
+ "private_endpoint_connection_name", private_endpoint_connection_name, "str"
+ ),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ if content_type is not None:
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_schedulers_delete_private_endpoint_connection_request( # pylint: disable=name-too-long
+ resource_group_name: str,
+ scheduler_name: str,
+ private_endpoint_connection_name: str,
+ subscription_id: str,
+ **kwargs: Any
+) -> HttpRequest:
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
+ # Construct URL
+ _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DurableTask/schedulers/{schedulerName}/privateEndpointConnections/{privateEndpointConnectionName}"
+ path_format_arguments = {
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "schedulerName": _SERIALIZER.url("scheduler_name", scheduler_name, "str"),
+ "privateEndpointConnectionName": _SERIALIZER.url(
+ "private_endpoint_connection_name", private_endpoint_connection_name, "str"
+ ),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)
+
+
+def build_schedulers_list_private_endpoint_connections_request( # pylint: disable=name-too-long
+ resource_group_name: str, scheduler_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DurableTask/schedulers/{schedulerName}/privateEndpointConnections"
+ path_format_arguments = {
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "schedulerName": _SERIALIZER.url("scheduler_name", scheduler_name, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
def build_task_hubs_get_request(
resource_group_name: str, scheduler_name: str, task_hub_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -269,7 +494,7 @@ def build_task_hubs_create_or_update_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -299,7 +524,7 @@ def build_task_hubs_delete_request(
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DurableTask/schedulers/{schedulerName}/taskHubs/{taskHubName}"
path_format_arguments = {
@@ -323,7 +548,7 @@ def build_task_hubs_list_by_scheduler_request( # pylint: disable=name-too-long
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -351,7 +576,7 @@ def build_retention_policies_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -380,7 +605,7 @@ def build_retention_policies_create_or_replace_request( # pylint: disable=name-
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -411,7 +636,7 @@ def build_retention_policies_update_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -440,7 +665,7 @@ def build_retention_policies_delete_request(
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DurableTask/schedulers/{schedulerName}/retentionPolicies/default"
path_format_arguments = {
@@ -463,7 +688,7 @@ def build_retention_policies_list_by_scheduler_request( # pylint: disable=name-
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -562,7 +787,10 @@ def prepare_request(next_link=None):
def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.Operation], deserialized.get("value", []))
+ list_of_elem = _deserialize(
+ List[_models.Operation],
+ deserialized.get("value", []),
+ )
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.get("nextLink") or None, iter(list_of_elem)
@@ -578,7 +806,10 @@ def get_next(next_link=None):
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
@@ -642,6 +873,1066 @@ def get(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> _
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = kwargs.pop("stream", False)
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ if _stream:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if _stream:
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
+ else:
+ deserialized = _deserialize(_models.Scheduler, response.json())
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ resource: Union[_models.Scheduler, JSON, IO[bytes]],
+ **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = kwargs.pop("params", {}) or {}
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _content = None
+ if isinstance(resource, (IOBase, bytes)):
+ _content = resource
+ else:
+ _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore
+
+ _request = build_schedulers_create_or_update_request(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ subscription_id=self._config.subscription_id,
+ content_type=content_type,
+ api_version=self._config.api_version,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 201:
+ response_headers["Azure-AsyncOperation"] = self._deserialize(
+ "str", response.headers.get("Azure-AsyncOperation")
+ )
+ response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
+
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ resource: _models.Scheduler,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.Scheduler]:
+ """Create or update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param resource: Resource create parameters. Required.
+ :type resource: ~azure.mgmt.durabletask.models.Scheduler
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ resource: JSON,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.Scheduler]:
+ """Create or update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param resource: Resource create parameters. Required.
+ :type resource: JSON
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ resource: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.Scheduler]:
+ """Create or update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param resource: Resource create parameters. Required.
+ :type resource: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace
+ def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ resource: Union[_models.Scheduler, JSON, IO[bytes]],
+ **kwargs: Any
+ ) -> LROPoller[_models.Scheduler]:
+ """Create or update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param resource: Resource create parameters. Is one of the following types: Scheduler, JSON,
+ IO[bytes] Required.
+ :type resource: ~azure.mgmt.durabletask.models.Scheduler or JSON or IO[bytes]
+ :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = kwargs.pop("params", {}) or {}
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.Scheduler] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ resource=resource,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ response = pipeline_response.http_response
+ deserialized = _deserialize(_models.Scheduler, response.json())
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+
+ if polling is True:
+ polling_method: PollingMethod = cast(
+ PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ )
+ elif polling is False:
+ polling_method = cast(PollingMethod, NoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return LROPoller[_models.Scheduler].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return LROPoller[_models.Scheduler](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ def _update_initial(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ properties: Union[_models.SchedulerUpdate, JSON, IO[bytes]],
+ **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = kwargs.pop("params", {}) or {}
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _content = None
+ if isinstance(properties, (IOBase, bytes)):
+ _content = properties
+ else:
+ _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore
+
+ _request = build_schedulers_update_request(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ subscription_id=self._config.subscription_id,
+ content_type=content_type,
+ api_version=self._config.api_version,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+ response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
+
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ def begin_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ properties: _models.SchedulerUpdate,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.Scheduler]:
+ """Update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param properties: The resource properties to be updated. Required.
+ :type properties: ~azure.mgmt.durabletask.models.SchedulerUpdate
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def begin_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ properties: JSON,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.Scheduler]:
+ """Update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param properties: The resource properties to be updated. Required.
+ :type properties: JSON
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def begin_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ properties: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.Scheduler]:
+ """Update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param properties: The resource properties to be updated. Required.
+ :type properties: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace
+ def begin_update(
+ self,
+ resource_group_name: str,
+ scheduler_name: str,
+ properties: Union[_models.SchedulerUpdate, JSON, IO[bytes]],
+ **kwargs: Any
+ ) -> LROPoller[_models.Scheduler]:
+ """Update a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param properties: The resource properties to be updated. Is one of the following types:
+ SchedulerUpdate, JSON, IO[bytes] Required.
+ :type properties: ~azure.mgmt.durabletask.models.SchedulerUpdate or JSON or IO[bytes]
+ :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
+ MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = kwargs.pop("params", {}) or {}
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.Scheduler] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = self._update_initial(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ properties=properties,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ response = pipeline_response.http_response
+ deserialized = _deserialize(_models.Scheduler, response.json())
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+
+ if polling is True:
+ polling_method: PollingMethod = cast(
+ PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ )
+ elif polling is False:
+ polling_method = cast(PollingMethod, NoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return LROPoller[_models.Scheduler].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return LROPoller[_models.Scheduler](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ def _delete_initial(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> Iterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+ _request = build_schedulers_delete_request(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ subscription_id=self._config.subscription_id,
+ api_version=self._config.api_version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+ response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
+
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @distributed_trace
+ def begin_delete(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> LROPoller[None]:
+ """Delete a Scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :return: An instance of LROPoller that returns None
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
+ if cls:
+ return cls(pipeline_response, None, {}) # type: ignore
+
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+
+ if polling is True:
+ polling_method: PollingMethod = cast(
+ PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ )
+ elif polling is False:
+ polling_method = cast(PollingMethod, NoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return LROPoller[None].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
+
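# Illustrative usage sketch for the begin_delete long-running operation above.
# The client class name (DurableTaskMgmtClient) and the `schedulers` attribute
# are assumptions inferred from the package and operation-group names; they are
# not confirmed by this diff.
from azure.identity import DefaultAzureCredential
from azure.mgmt.durabletask import DurableTaskMgmtClient

client = DurableTaskMgmtClient(DefaultAzureCredential(), "<subscription-id>")

# begin_delete returns an LROPoller[None] backed by ARMPolling.
poller = client.schedulers.begin_delete("<resource-group>", "<scheduler-name>")
token = poller.continuation_token()  # may be persisted and passed back via continuation_token=...
poller.result()  # block until the service reports the delete as complete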
+ @distributed_trace
+ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.Scheduler"]:
+ """List Schedulers by resource group.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :return: An iterator-like instance of Scheduler
+ :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[List[_models.Scheduler]] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_schedulers_list_by_resource_group_request(
+ resource_group_name=resource_group_name,
+ subscription_id=self._config.subscription_id,
+ api_version=self._config.api_version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url(
+ "self._config.base_url", self._config.base_url, "str", skip_quote=True
+ ),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url(
+ "self._config.base_url", self._config.base_url, "str", skip_quote=True
+ ),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ return _request
+
+ def extract_data(pipeline_response):
+ deserialized = pipeline_response.http_response.json()
+ list_of_elem = _deserialize(
+ List[_models.Scheduler],
+ deserialized.get("value", []),
+ )
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.get("nextLink") or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(get_next, extract_data)
+
+ @distributed_trace
+ def list_by_subscription(self, **kwargs: Any) -> ItemPaged["_models.Scheduler"]:
+ """List Schedulers by subscription.
+
+ :return: An iterator-like instance of Scheduler
+ :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.durabletask.models.Scheduler]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[List[_models.Scheduler]] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_schedulers_list_by_subscription_request(
+ subscription_id=self._config.subscription_id,
+ api_version=self._config.api_version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url(
+ "self._config.base_url", self._config.base_url, "str", skip_quote=True
+ ),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url(
+ "self._config.base_url", self._config.base_url, "str", skip_quote=True
+ ),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ return _request
+
+ def extract_data(pipeline_response):
+ deserialized = pipeline_response.http_response.json()
+ list_of_elem = _deserialize(
+ List[_models.Scheduler],
+ deserialized.get("value", []),
+ )
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.get("nextLink") or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(get_next, extract_data)
+
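# Illustrative usage sketch for the two paging operations above; both return an
# azure.core.paging.ItemPaged that follows nextLink lazily. Client construction
# and the `schedulers` attribute are the same assumptions as in the earlier sketch.
from azure.identity import DefaultAzureCredential
from azure.mgmt.durabletask import DurableTaskMgmtClient

client = DurableTaskMgmtClient(DefaultAzureCredential(), "<subscription-id>")

for scheduler in client.schedulers.list_by_resource_group("<resource-group>"):
    print(scheduler.name)  # .name comes from the common ARM Resource envelope

for scheduler in client.schedulers.list_by_subscription():
    print(scheduler.id)

# Page-at-a-time iteration (one HTTP call per page) is also available:
for page in client.schedulers.list_by_subscription().by_page():
    items = list(page)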
+ @distributed_trace
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_link_resource_name",
+ "accept",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ def get_private_link(
+ self, resource_group_name: str, scheduler_name: str, private_link_resource_name: str, **kwargs: Any
+ ) -> _models.SchedulerPrivateLinkResource:
+ """Get a private link resource for the durable task scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param private_link_resource_name: The name of the private link associated with the Azure
+ resource. Required.
+ :type private_link_resource_name: str
+ :return: SchedulerPrivateLinkResource. The SchedulerPrivateLinkResource is compatible with
+ MutableMapping
+ :rtype: ~azure.mgmt.durabletask.models.SchedulerPrivateLinkResource
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[_models.SchedulerPrivateLinkResource] = kwargs.pop("cls", None)
+
+ _request = build_schedulers_get_private_link_request(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ private_link_resource_name=private_link_resource_name,
+ subscription_id=self._config.subscription_id,
+ api_version=self._config.api_version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = kwargs.pop("stream", False)
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ if _stream:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if _stream:
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
+ else:
+ deserialized = _deserialize(_models.SchedulerPrivateLinkResource, response.json())
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ @distributed_trace
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": ["api_version", "subscription_id", "resource_group_name", "scheduler_name", "accept"]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ def list_private_links(
+ self, resource_group_name: str, scheduler_name: str, **kwargs: Any
+ ) -> ItemPaged["_models.SchedulerPrivateLinkResource"]:
+ """List private link resources for the durable task scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :return: An iterator-like instance of SchedulerPrivateLinkResource
+ :rtype:
+ ~azure.core.paging.ItemPaged[~azure.mgmt.durabletask.models.SchedulerPrivateLinkResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[List[_models.SchedulerPrivateLinkResource]] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_schedulers_list_private_links_request(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ subscription_id=self._config.subscription_id,
+ api_version=self._config.api_version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url(
+ "self._config.base_url", self._config.base_url, "str", skip_quote=True
+ ),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url(
+ "self._config.base_url", self._config.base_url, "str", skip_quote=True
+ ),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ return _request
+
+ def extract_data(pipeline_response):
+ deserialized = pipeline_response.http_response.json()
+ list_of_elem = _deserialize(
+ List[_models.SchedulerPrivateLinkResource],
+ deserialized.get("value", []),
+ )
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.get("nextLink") or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(get_next, extract_data)
+
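# Illustrative usage sketch for the private link resource operations introduced
# with api-version 2026-02-01 (get_private_link / list_private_links). The same
# client naming assumptions as the earlier sketches apply; these calls fail
# api-version validation if the client is pinned to an older api-version.
from azure.identity import DefaultAzureCredential
from azure.mgmt.durabletask import DurableTaskMgmtClient

client = DurableTaskMgmtClient(DefaultAzureCredential(), "<subscription-id>")

for link in client.schedulers.list_private_links("<resource-group>", "<scheduler-name>"):
    print(link.name)

link = client.schedulers.get_private_link(
    "<resource-group>", "<scheduler-name>", "<private-link-resource-name>"
)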
+ @distributed_trace
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_endpoint_connection_name",
+ "accept",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ def get_private_endpoint_connection(
+ self, resource_group_name: str, scheduler_name: str, private_endpoint_connection_name: str, **kwargs: Any
+ ) -> _models.PrivateEndpointConnection:
+ """Get a private endpoint connection for the durable task scheduler.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
+ :return: PrivateEndpointConnection. The PrivateEndpointConnection is compatible with
+ MutableMapping
+ :rtype: ~azure.mgmt.durabletask.models.PrivateEndpointConnection
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
+
+ _request = build_schedulers_get_private_endpoint_connection_request(
+ resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
+ subscription_id=self._config.subscription_id,
+ api_version=self._config.api_version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -656,24 +1947,43 @@ def get(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> _
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
- deserialized = _deserialize(_models.Scheduler, response.json())
+ deserialized = _deserialize(_models.PrivateEndpointConnection, response.json())
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- def _create_or_update_initial(
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_endpoint_connection_name",
+ "content_type",
+ "accept",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ def _create_or_update_private_endpoint_connection_initial( # pylint: disable=name-too-long
self,
resource_group_name: str,
scheduler_name: str,
- resource: Union[_models.Scheduler, JSON, IO[bytes]],
+ private_endpoint_connection_name: str,
+ resource: Union[_models.PrivateEndpointConnection, JSON, IO[bytes]],
**kwargs: Any
) -> Iterator[bytes]:
error_map: MutableMapping = {
@@ -697,9 +2007,10 @@ def _create_or_update_initial(
else:
_content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore
- _request = build_schedulers_create_or_update_request(
+ _request = build_schedulers_create_or_update_private_endpoint_connection_request(
resource_group_name=resource_group_name,
scheduler_name=scheduler_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
api_version=self._config.api_version,
@@ -712,6 +2023,7 @@ def _create_or_update_initial(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -725,7 +2037,10 @@ def _create_or_update_initial(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -735,7 +2050,7 @@ def _create_or_update_initial(
)
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -743,124 +2058,156 @@ def _create_or_update_initial(
return deserialized # type: ignore
@overload
- def begin_create_or_update(
+ def begin_create_or_update_private_endpoint_connection( # pylint: disable=name-too-long
self,
resource_group_name: str,
scheduler_name: str,
- resource: _models.Scheduler,
+ private_endpoint_connection_name: str,
+ resource: _models.PrivateEndpointConnection,
*,
content_type: str = "application/json",
**kwargs: Any
- ) -> LROPoller[_models.Scheduler]:
- """Create or update a Scheduler.
+ ) -> LROPoller[_models.PrivateEndpointConnection]:
+ """Create or update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:param resource: Resource create parameters. Required.
- :type resource: ~azure.mgmt.durabletask.models.Scheduler
+ :type resource: ~azure.mgmt.durabletask.models.PrivateEndpointConnection
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :return: An instance of LROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
- def begin_create_or_update(
+ def begin_create_or_update_private_endpoint_connection( # pylint: disable=name-too-long
self,
resource_group_name: str,
scheduler_name: str,
+ private_endpoint_connection_name: str,
resource: JSON,
*,
content_type: str = "application/json",
**kwargs: Any
- ) -> LROPoller[_models.Scheduler]:
- """Create or update a Scheduler.
+ ) -> LROPoller[_models.PrivateEndpointConnection]:
+ """Create or update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:param resource: Resource create parameters. Required.
:type resource: JSON
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :return: An instance of LROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
- def begin_create_or_update(
+ def begin_create_or_update_private_endpoint_connection( # pylint: disable=name-too-long
self,
resource_group_name: str,
scheduler_name: str,
+ private_endpoint_connection_name: str,
resource: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
- ) -> LROPoller[_models.Scheduler]:
- """Create or update a Scheduler.
+ ) -> LROPoller[_models.PrivateEndpointConnection]:
+ """Create or update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:param resource: Resource create parameters. Required.
:type resource: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :return: An instance of LROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
- def begin_create_or_update(
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_endpoint_connection_name",
+ "content_type",
+ "accept",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ def begin_create_or_update_private_endpoint_connection( # pylint: disable=name-too-long
self,
resource_group_name: str,
scheduler_name: str,
- resource: Union[_models.Scheduler, JSON, IO[bytes]],
+ private_endpoint_connection_name: str,
+ resource: Union[_models.PrivateEndpointConnection, JSON, IO[bytes]],
**kwargs: Any
- ) -> LROPoller[_models.Scheduler]:
- """Create or update a Scheduler.
+ ) -> LROPoller[_models.PrivateEndpointConnection]:
+ """Create or update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
- :param resource: Resource create parameters. Is one of the following types: Scheduler, JSON,
- IO[bytes] Required.
- :type resource: ~azure.mgmt.durabletask.models.Scheduler or JSON or IO[bytes]
- :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
+ :param resource: Resource create parameters. Is one of the following types:
+ PrivateEndpointConnection, JSON, IO[bytes] Required.
+ :type resource: ~azure.mgmt.durabletask.models.PrivateEndpointConnection or JSON or IO[bytes]
+ :return: An instance of LROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.Scheduler] = kwargs.pop("cls", None)
+ cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._create_or_update_initial(
+ raw_result = self._create_or_update_private_endpoint_connection_initial(
resource_group_name=resource_group_name,
scheduler_name=scheduler_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
resource=resource,
content_type=content_type,
cls=lambda x, y, z: x,
@@ -873,7 +2220,7 @@ def begin_create_or_update(
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
- deserialized = _deserialize(_models.Scheduler, response.json())
+ deserialized = _deserialize(_models.PrivateEndpointConnection, response.json())
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -891,21 +2238,37 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller[_models.Scheduler].from_continuation_token(
+ return LROPoller[_models.PrivateEndpointConnection].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller[_models.Scheduler](
+ return LROPoller[_models.PrivateEndpointConnection](
self._client, raw_result, get_long_running_output, polling_method # type: ignore
)
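# Illustrative usage sketch for begin_create_or_update_private_endpoint_connection.
# The overloads above accept the PrivateEndpointConnection model, a JSON dict, or a
# byte stream; a plain dict is used here so no model import is required. The payload
# shape (properties / privateLinkServiceConnectionState) is an assumption based on
# the common ARM private endpoint contract, not something this diff confirms.
from azure.identity import DefaultAzureCredential
from azure.mgmt.durabletask import DurableTaskMgmtClient

client = DurableTaskMgmtClient(DefaultAzureCredential(), "<subscription-id>")

approval = {
    "properties": {
        "privateLinkServiceConnectionState": {
            "status": "Approved",
            "description": "Approved by the scheduler owner",
        }
    }
}
poller = client.schedulers.begin_create_or_update_private_endpoint_connection(
    "<resource-group>", "<scheduler-name>", "<connection-name>", approval
)
connection = poller.result()  # deserialized as models.PrivateEndpointConnection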
- def _update_initial(
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_endpoint_connection_name",
+ "content_type",
+ "accept",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ def _update_private_endpoint_connection_initial( # pylint: disable=name-too-long
self,
resource_group_name: str,
scheduler_name: str,
- properties: Union[_models.SchedulerUpdate, JSON, IO[bytes]],
+ private_endpoint_connection_name: str,
+ properties: Union[_models.PrivateEndpointConnectionUpdate, JSON, IO[bytes]],
**kwargs: Any
) -> Iterator[bytes]:
error_map: MutableMapping = {
@@ -929,9 +2292,10 @@ def _update_initial(
else:
_content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore
- _request = build_schedulers_update_request(
+ _request = build_schedulers_update_private_endpoint_connection_request(
resource_group_name=resource_group_name,
scheduler_name=scheduler_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
api_version=self._config.api_version,
@@ -944,6 +2308,7 @@ def _update_initial(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -957,7 +2322,10 @@ def _update_initial(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -965,7 +2333,7 @@ def _update_initial(
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -973,124 +2341,157 @@ def _update_initial(
return deserialized # type: ignore
@overload
- def begin_update(
+ def begin_update_private_endpoint_connection(
self,
resource_group_name: str,
scheduler_name: str,
- properties: _models.SchedulerUpdate,
+ private_endpoint_connection_name: str,
+ properties: _models.PrivateEndpointConnectionUpdate,
*,
content_type: str = "application/json",
**kwargs: Any
- ) -> LROPoller[_models.Scheduler]:
- """Update a Scheduler.
+ ) -> LROPoller[_models.PrivateEndpointConnection]:
+ """Update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:param properties: The resource properties to be updated. Required.
- :type properties: ~azure.mgmt.durabletask.models.SchedulerUpdate
+ :type properties: ~azure.mgmt.durabletask.models.PrivateEndpointConnectionUpdate
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :return: An instance of LROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
- def begin_update(
+ def begin_update_private_endpoint_connection(
self,
resource_group_name: str,
scheduler_name: str,
+ private_endpoint_connection_name: str,
properties: JSON,
*,
content_type: str = "application/json",
**kwargs: Any
- ) -> LROPoller[_models.Scheduler]:
- """Update a Scheduler.
+ ) -> LROPoller[_models.PrivateEndpointConnection]:
+ """Update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:param properties: The resource properties to be updated. Required.
:type properties: JSON
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :return: An instance of LROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
- def begin_update(
+ def begin_update_private_endpoint_connection(
self,
resource_group_name: str,
scheduler_name: str,
+ private_endpoint_connection_name: str,
properties: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
- ) -> LROPoller[_models.Scheduler]:
- """Update a Scheduler.
+ ) -> LROPoller[_models.PrivateEndpointConnection]:
+ """Update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:param properties: The resource properties to be updated. Required.
:type properties: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ :return: An instance of LROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
- def begin_update(
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_endpoint_connection_name",
+ "content_type",
+ "accept",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ def begin_update_private_endpoint_connection(
self,
resource_group_name: str,
scheduler_name: str,
- properties: Union[_models.SchedulerUpdate, JSON, IO[bytes]],
+ private_endpoint_connection_name: str,
+ properties: Union[_models.PrivateEndpointConnectionUpdate, JSON, IO[bytes]],
**kwargs: Any
- ) -> LROPoller[_models.Scheduler]:
- """Update a Scheduler.
+ ) -> LROPoller[_models.PrivateEndpointConnection]:
+ """Update a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:param properties: The resource properties to be updated. Is one of the following types:
- SchedulerUpdate, JSON, IO[bytes] Required.
- :type properties: ~azure.mgmt.durabletask.models.SchedulerUpdate or JSON or IO[bytes]
- :return: An instance of LROPoller that returns Scheduler. The Scheduler is compatible with
- MutableMapping
- :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.Scheduler]
+ PrivateEndpointConnectionUpdate, JSON, IO[bytes] Required.
+ :type properties: ~azure.mgmt.durabletask.models.PrivateEndpointConnectionUpdate or JSON or
+ IO[bytes]
+ :return: An instance of LROPoller that returns PrivateEndpointConnection. The
+ PrivateEndpointConnection is compatible with MutableMapping
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.Scheduler] = kwargs.pop("cls", None)
+ cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._update_initial(
+ raw_result = self._update_private_endpoint_connection_initial(
resource_group_name=resource_group_name,
scheduler_name=scheduler_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
properties=properties,
content_type=content_type,
cls=lambda x, y, z: x,
@@ -1103,7 +2504,7 @@ def begin_update(
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
- deserialized = _deserialize(_models.Scheduler, response.json())
+ deserialized = _deserialize(_models.PrivateEndpointConnection, response.json())
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -1121,17 +2522,32 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller[_models.Scheduler].from_continuation_token(
+ return LROPoller[_models.PrivateEndpointConnection].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller[_models.Scheduler](
+ return LROPoller[_models.PrivateEndpointConnection](
self._client, raw_result, get_long_running_output, polling_method # type: ignore
)
- def _delete_initial(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> Iterator[bytes]:
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_endpoint_connection_name",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ def _delete_private_endpoint_connection_initial( # pylint: disable=name-too-long
+ self, resource_group_name: str, scheduler_name: str, private_endpoint_connection_name: str, **kwargs: Any
+ ) -> Iterator[bytes]:
error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
@@ -1145,9 +2561,10 @@ def _delete_initial(self, resource_group_name: str, scheduler_name: str, **kwarg
cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- _request = build_schedulers_delete_request(
+ _request = build_schedulers_delete_private_endpoint_connection_request(
resource_group_name=resource_group_name,
scheduler_name=scheduler_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=self._config.api_version,
headers=_headers,
@@ -1158,6 +2575,7 @@ def _delete_initial(self, resource_group_name: str, scheduler_name: str, **kwarg
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1171,7 +2589,10 @@ def _delete_initial(self, resource_group_name: str, scheduler_name: str, **kwarg
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -1179,7 +2600,7 @@ def _delete_initial(self, resource_group_name: str, scheduler_name: str, **kwarg
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -1187,14 +2608,32 @@ def _delete_initial(self, resource_group_name: str, scheduler_name: str, **kwarg
return deserialized # type: ignore
@distributed_trace
- def begin_delete(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> LROPoller[None]:
- """Delete a Scheduler.
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": [
+ "api_version",
+ "subscription_id",
+ "resource_group_name",
+ "scheduler_name",
+ "private_endpoint_connection_name",
+ ]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ def begin_delete_private_endpoint_connection(
+ self, resource_group_name: str, scheduler_name: str, private_endpoint_connection_name: str, **kwargs: Any
+ ) -> LROPoller[None]:
+ """Delete a private endpoint connection for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param scheduler_name: The name of the Scheduler. Required.
:type scheduler_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the Azure resource. Required.
+ :type private_endpoint_connection_name: str
:return: An instance of LROPoller that returns None
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1207,9 +2646,10 @@ def begin_delete(self, resource_group_name: str, scheduler_name: str, **kwargs:
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial(
+ raw_result = self._delete_private_endpoint_connection_initial(
resource_group_name=resource_group_name,
scheduler_name=scheduler_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
@@ -1244,20 +2684,31 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
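# Illustrative usage sketch for begin_delete_private_endpoint_connection; the same
# client naming assumptions as the earlier sketches apply.
from azure.identity import DefaultAzureCredential
from azure.mgmt.durabletask import DurableTaskMgmtClient

client = DurableTaskMgmtClient(DefaultAzureCredential(), "<subscription-id>")
client.schedulers.begin_delete_private_endpoint_connection(
    "<resource-group>", "<scheduler-name>", "<connection-name>"
).result()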
@distributed_trace
- def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.Scheduler"]:
- """List Schedulers by resource group.
+ @api_version_validation(
+ method_added_on="2026-02-01",
+ params_added_on={
+ "2026-02-01": ["api_version", "subscription_id", "resource_group_name", "scheduler_name", "accept"]
+ },
+ api_versions_list=["2026-02-01"],
+ )
+ def list_private_endpoint_connections(
+ self, resource_group_name: str, scheduler_name: str, **kwargs: Any
+ ) -> ItemPaged["_models.PrivateEndpointConnection"]:
+ """List private endpoint connections for the durable task scheduler.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :return: An iterator like instance of Scheduler
- :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.durabletask.models.Scheduler]
+ :param scheduler_name: The name of the Scheduler. Required.
+ :type scheduler_name: str
+ :return: An iterator-like instance of PrivateEndpointConnection
+ :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.durabletask.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
- cls: ClsType[List[_models.Scheduler]] = kwargs.pop("cls", None)
+ cls: ClsType[List[_models.PrivateEndpointConnection]] = kwargs.pop("cls", None)
error_map: MutableMapping = {
401: ClientAuthenticationError,
@@ -1270,8 +2721,9 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
def prepare_request(next_link=None):
if not next_link:
- _request = build_schedulers_list_by_resource_group_request(
+ _request = build_schedulers_list_private_endpoint_connections_request(
resource_group_name=resource_group_name,
+ scheduler_name=scheduler_name,
subscription_id=self._config.subscription_id,
api_version=self._config.api_version,
headers=_headers,
@@ -1308,91 +2760,10 @@ def prepare_request(next_link=None):
def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.Scheduler], deserialized.get("value", []))
- if cls:
- list_of_elem = cls(list_of_elem) # type: ignore
- return deserialized.get("nextLink") or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- _request = prepare_request(next_link)
-
- _stream = False
- pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- _request, stream=_stream, **kwargs
+ list_of_elem = _deserialize(
+ List[_models.PrivateEndpointConnection],
+ deserialized.get("value", []),
)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
- raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(get_next, extract_data)
-
- @distributed_trace
- def list_by_subscription(self, **kwargs: Any) -> ItemPaged["_models.Scheduler"]:
- """List Schedulers by subscription.
-
- :return: An iterator like instance of Scheduler
- :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.durabletask.models.Scheduler]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- _headers = kwargs.pop("headers", {}) or {}
- _params = kwargs.pop("params", {}) or {}
-
- cls: ClsType[List[_models.Scheduler]] = kwargs.pop("cls", None)
-
- error_map: MutableMapping = {
- 401: ClientAuthenticationError,
- 404: ResourceNotFoundError,
- 409: ResourceExistsError,
- 304: ResourceNotModifiedError,
- }
- error_map.update(kwargs.pop("error_map", {}) or {})
-
- def prepare_request(next_link=None):
- if not next_link:
-
- _request = build_schedulers_list_by_subscription_request(
- subscription_id=self._config.subscription_id,
- api_version=self._config.api_version,
- headers=_headers,
- params=_params,
- )
- path_format_arguments = {
- "endpoint": self._serialize.url(
- "self._config.base_url", self._config.base_url, "str", skip_quote=True
- ),
- }
- _request.url = self._client.format_url(_request.url, **path_format_arguments)
-
- else:
- # make call to next link with the client's api-version
- _parsed_next_link = urllib.parse.urlparse(next_link)
- _next_request_params = case_insensitive_dict(
- {
- key: [urllib.parse.quote(v) for v in value]
- for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
- }
- )
- _next_request_params["api-version"] = self._config.api_version
- _request = HttpRequest(
- "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
- )
- path_format_arguments = {
- "endpoint": self._serialize.url(
- "self._config.base_url", self._config.base_url, "str", skip_quote=True
- ),
- }
- _request.url = self._client.format_url(_request.url, **path_format_arguments)
-
- return _request
-
- def extract_data(pipeline_response):
- deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.Scheduler], deserialized.get("value", []))
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.get("nextLink") or None, iter(list_of_elem)
@@ -1408,7 +2779,10 @@ def get_next(next_link=None):
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
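# Illustrative usage sketch for get_private_endpoint_connection and
# list_private_endpoint_connections; same client naming assumptions as above.
from azure.identity import DefaultAzureCredential
from azure.mgmt.durabletask import DurableTaskMgmtClient

client = DurableTaskMgmtClient(DefaultAzureCredential(), "<subscription-id>")

for conn in client.schedulers.list_private_endpoint_connections(
    "<resource-group>", "<scheduler-name>"
):
    print(conn.name)

conn = client.schedulers.get_private_endpoint_connection(
    "<resource-group>", "<scheduler-name>", "<connection-name>"
)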
@@ -1475,6 +2849,7 @@ def get(self, resource_group_name: str, scheduler_name: str, task_hub_name: str,
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1489,11 +2864,14 @@ def get(self, resource_group_name: str, scheduler_name: str, task_hub_name: str,
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.TaskHub, response.json())
@@ -1547,6 +2925,7 @@ def _create_or_update_initial(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1560,7 +2939,10 @@ def _create_or_update_initial(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -1570,7 +2952,7 @@ def _create_or_update_initial(
)
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -1779,6 +3161,7 @@ def _delete_initial(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1792,7 +3175,10 @@ def _delete_initial(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -1800,7 +3186,7 @@ def _delete_initial(
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -1939,7 +3325,10 @@ def prepare_request(next_link=None):
def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.TaskHub], deserialized.get("value", []))
+ list_of_elem = _deserialize(
+ List[_models.TaskHub],
+ deserialized.get("value", []),
+ )
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.get("nextLink") or None, iter(list_of_elem)
@@ -1955,7 +3344,10 @@ def get_next(next_link=None):
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
@@ -1986,7 +3378,7 @@ def __init__(self, *args, **kwargs) -> None:
params_added_on={
"2025-04-01-preview": ["api_version", "subscription_id", "resource_group_name", "scheduler_name", "accept"]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
def get(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> _models.RetentionPolicy:
"""Get a Retention Policy.
@@ -2026,6 +3418,7 @@ def get(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> _
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -2040,11 +3433,14 @@ def get(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> _
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.RetentionPolicy, response.json())
@@ -2065,7 +3461,7 @@ def get(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> _
"accept",
]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
def _create_or_replace_initial(
self,
@@ -2110,6 +3506,7 @@ def _create_or_replace_initial(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -2123,7 +3520,10 @@ def _create_or_replace_initial(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -2133,7 +3533,7 @@ def _create_or_replace_initial(
)
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -2237,7 +3637,7 @@ def begin_create_or_replace(
"accept",
]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
def begin_create_or_replace(
self,
@@ -2325,7 +3725,7 @@ def get_long_running_output(pipeline_response):
"accept",
]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
def _update_initial(
self,
@@ -2370,6 +3770,7 @@ def _update_initial(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -2383,7 +3784,10 @@ def _update_initial(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -2391,7 +3795,7 @@ def _update_initial(
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -2495,7 +3899,7 @@ def begin_update(
"accept",
]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
def begin_update(
self,
@@ -2576,7 +3980,7 @@ def get_long_running_output(pipeline_response):
params_added_on={
"2025-04-01-preview": ["api_version", "subscription_id", "resource_group_name", "scheduler_name"]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
def _delete_initial(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> Iterator[bytes]:
error_map: MutableMapping = {
@@ -2605,6 +4009,7 @@ def _delete_initial(self, resource_group_name: str, scheduler_name: str, **kwarg
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -2618,7 +4023,10 @@ def _delete_initial(self, resource_group_name: str, scheduler_name: str, **kwarg
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
@@ -2626,7 +4034,7 @@ def _delete_initial(self, resource_group_name: str, scheduler_name: str, **kwarg
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -2639,7 +4047,7 @@ def _delete_initial(self, resource_group_name: str, scheduler_name: str, **kwarg
params_added_on={
"2025-04-01-preview": ["api_version", "subscription_id", "resource_group_name", "scheduler_name"]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
def begin_delete(self, resource_group_name: str, scheduler_name: str, **kwargs: Any) -> LROPoller[None]:
"""Delete a Retention Policy.
@@ -2703,7 +4111,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
params_added_on={
"2025-04-01-preview": ["api_version", "subscription_id", "resource_group_name", "scheduler_name", "accept"]
},
- api_versions_list=["2025-04-01-preview", "2025-11-01"],
+ api_versions_list=["2025-04-01-preview", "2025-11-01", "2026-02-01"],
)
def list_by_scheduler(
self, resource_group_name: str, scheduler_name: str, **kwargs: Any
@@ -2774,7 +4182,10 @@ def prepare_request(next_link=None):
def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.RetentionPolicy], deserialized.get("value", []))
+ list_of_elem = _deserialize(
+ List[_models.RetentionPolicy],
+ deserialized.get("value", []),
+ )
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.get("nextLink") or None, iter(list_of_elem)
@@ -2790,7 +4201,10 @@ def get_next(next_link=None):
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.ErrorResponse, response)
+ error = _failsafe_deserialize(
+ _models.ErrorResponse,
+ response,
+ )
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
diff --git a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/operations/_patch.py b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/operations/_patch.py
index 8bcb627aa475..87676c65a8f0 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/operations/_patch.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/azure/mgmt/durabletask/operations/_patch.py
@@ -7,9 +7,9 @@
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
-from typing import List
-__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+__all__: list[str] = [] # Add all objects you want publicly available to users at this package level
def patch_sdk():
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/operations_list.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/operations_list.py
index f29f821d7612..620d5d0dbd8e 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/operations_list.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/operations_list.py
@@ -35,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: 2025-11-01/Operations_List.json
+# x-ms-original-file: 2026-02-01/Operations_List.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_create_maximum_set_gen.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_create_maximum_set_gen.py
new file mode 100644
index 000000000000..5ed09e5d6d21
--- /dev/null
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_create_maximum_set_gen.py
@@ -0,0 +1,53 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.durabletask import DurableTaskMgmtClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-durabletask
+# USAGE
+ python private_endpoint_connections_create_maximum_set_gen.py
+
+ Before running the sample, please set the values of the client ID, tenant ID, and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more information about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DurableTaskMgmtClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="SUBSCRIPTION_ID",
+ )
+
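+ # begin_create_or_update_private_endpoint_connection returns an LROPoller; .result() below
+ # blocks until the long-running operation completes and returns the final connection resource.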
+ response = client.schedulers.begin_create_or_update_private_endpoint_connection(
+ resource_group_name="rgdurabletask",
+ scheduler_name="testscheduler",
+ private_endpoint_connection_name="spzckqrbhfnabu",
+ resource={
+ "properties": {
+ "privateEndpoint": {},
+ "privateLinkServiceConnectionState": {
+ "actionsRequired": "mxymqfbbmpwjxsroldlsd",
+ "description": "ujdcsoyxljivwsgfkexhotaxcmzq",
+ "status": "Pending",
+ },
+ }
+ },
+ ).result()
+ print(response)
+
+
+# x-ms-original-file: 2026-02-01/PrivateEndpointConnections_Create_MaximumSet_Gen.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_delete_maximum_set_gen.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_delete_maximum_set_gen.py
new file mode 100644
index 000000000000..bad74af3e971
--- /dev/null
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_delete_maximum_set_gen.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.durabletask import DurableTaskMgmtClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-durabletask
+# USAGE
+ python private_endpoint_connections_delete_maximum_set_gen.py
+
+ Before running the sample, please set the values of the client ID, tenant ID, and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more information about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DurableTaskMgmtClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="SUBSCRIPTION_ID",
+ )
+
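+ # Deletion is a long-running operation; .result() blocks until the connection is removed.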
+ client.schedulers.begin_delete_private_endpoint_connection(
+ resource_group_name="rgdurabletask",
+ scheduler_name="testscheduler",
+ private_endpoint_connection_name="spzckqrbhfnabu",
+ ).result()
+
+
+# x-ms-original-file: 2026-02-01/PrivateEndpointConnections_Delete_MaximumSet_Gen.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_get_maximum_set_gen.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_get_maximum_set_gen.py
new file mode 100644
index 000000000000..6e0d8832d95a
--- /dev/null
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_get_maximum_set_gen.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.durabletask import DurableTaskMgmtClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-durabletask
+# USAGE
+ python private_endpoint_connections_get_maximum_set_gen.py
+
+ Before running the sample, please set the values of the client ID, tenant ID, and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more information about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DurableTaskMgmtClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="SUBSCRIPTION_ID",
+ )
+
+ response = client.schedulers.get_private_endpoint_connection(
+ resource_group_name="rgdurabletask",
+ scheduler_name="testscheduler",
+ private_endpoint_connection_name="spzckqrbhfnabu",
+ )
+ print(response)
+
+
+# x-ms-original-file: 2026-02-01/PrivateEndpointConnections_Get_MaximumSet_Gen.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_list_maximum_set_gen.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_list_maximum_set_gen.py
new file mode 100644
index 000000000000..a5f4c3348534
--- /dev/null
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_list_maximum_set_gen.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.durabletask import DurableTaskMgmtClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-durabletask
+# USAGE
+ python private_endpoint_connections_list_maximum_set_gen.py
+
+ Before running the sample, please set the values of the client ID, tenant ID, and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more information about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DurableTaskMgmtClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="SUBSCRIPTION_ID",
+ )
+
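+ # list_private_endpoint_connections returns a pageable iterator; the loop below fetches each connection.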
+ response = client.schedulers.list_private_endpoint_connections(
+ resource_group_name="rgdurabletask",
+ scheduler_name="testscheduler",
+ )
+ for item in response:
+ print(item)
+
+
+# x-ms-original-file: 2026-02-01/PrivateEndpointConnections_List_MaximumSet_Gen.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_update.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_update.py
new file mode 100644
index 000000000000..3ad1d020dce3
--- /dev/null
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_endpoint_connections_update.py
@@ -0,0 +1,44 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.durabletask import DurableTaskMgmtClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-durabletask
+# USAGE
+ python private_endpoint_connections_update.py
+
+ Before running the sample, please set the values of the client ID, tenant ID, and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more information about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DurableTaskMgmtClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="SUBSCRIPTION_ID",
+ )
+
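+ # The payload below approves the pending connection by setting its connection state status to
+ # "Approved"; .result() waits for the long-running update to complete.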
+ response = client.schedulers.begin_update_private_endpoint_connection(
+ resource_group_name="rgdurabletask",
+ scheduler_name="testscheduler",
+ private_endpoint_connection_name="spzckqrbhfnabu",
+ properties={"properties": {"privateLinkServiceConnectionState": {"status": "Approved"}}},
+ ).result()
+ print(response)
+
+
+# x-ms-original-file: 2026-02-01/PrivateEndpointConnections_Update.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_link_resources_get_maximum_set_gen.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_link_resources_get_maximum_set_gen.py
new file mode 100644
index 000000000000..a93718ecc69c
--- /dev/null
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_link_resources_get_maximum_set_gen.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.durabletask import DurableTaskMgmtClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-durabletask
+# USAGE
+ python private_link_resources_get_maximum_set_gen.py
+
+ Before running the sample, please set the values of the client ID, tenant ID, and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more information about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DurableTaskMgmtClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="SUBSCRIPTION_ID",
+ )
+
+ response = client.schedulers.get_private_link(
+ resource_group_name="rgdurabletask",
+ scheduler_name="testscheduler",
+ private_link_resource_name="ulbdiqhrmwnkejje",
+ )
+ print(response)
+
+
+# x-ms-original-file: 2026-02-01/PrivateLinkResources_Get_MaximumSet_Gen.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_link_resources_list_maximum_set_gen.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_link_resources_list_maximum_set_gen.py
new file mode 100644
index 000000000000..377f28ae4385
--- /dev/null
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/private_link_resources_list_maximum_set_gen.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.durabletask import DurableTaskMgmtClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-durabletask
+# USAGE
+ python private_link_resources_list_maximum_set_gen.py
+
+ Before running the sample, please set the values of the client ID, tenant ID, and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more information about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DurableTaskMgmtClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="SUBSCRIPTION_ID",
+ )
+
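+ # list_private_links returns a pageable iterator of the scheduler's private link resources.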
+ response = client.schedulers.list_private_links(
+ resource_group_name="rgdurabletask",
+ scheduler_name="testscheduler",
+ )
+ for item in response:
+ print(item)
+
+
+# x-ms-original-file: 2026-02-01/PrivateLinkResources_List_MaximumSet_Gen.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_create_or_replace_maximum_set_gen.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_create_or_replace_maximum_set_gen.py
index d170140d2d41..c4b2b2dd832c 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_create_or_replace_maximum_set_gen.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_create_or_replace_maximum_set_gen.py
@@ -45,6 +45,6 @@ def main():
print(response)
-# x-ms-original-file: 2025-11-01/RetentionPolicies_CreateOrReplace_MaximumSet_Gen.json
+# x-ms-original-file: 2026-02-01/RetentionPolicies_CreateOrReplace_MaximumSet_Gen.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_delete_maximum_set_gen.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_delete_maximum_set_gen.py
index 2f663c903793..7db2aeac74d2 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_delete_maximum_set_gen.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_delete_maximum_set_gen.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: 2025-11-01/RetentionPolicies_Delete_MaximumSet_Gen.json
+# x-ms-original-file: 2026-02-01/RetentionPolicies_Delete_MaximumSet_Gen.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_get_maximum_set_gen.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_get_maximum_set_gen.py
index 0e47923726ec..19f9ba8febac 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_get_maximum_set_gen.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_get_maximum_set_gen.py
@@ -37,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: 2025-11-01/RetentionPolicies_Get_MaximumSet_Gen.json
+# x-ms-original-file: 2026-02-01/RetentionPolicies_Get_MaximumSet_Gen.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_list_by_scheduler_maximum_set_gen.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_list_by_scheduler_maximum_set_gen.py
index 63e52333d999..7570b91005ab 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_list_by_scheduler_maximum_set_gen.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_list_by_scheduler_maximum_set_gen.py
@@ -38,6 +38,6 @@ def main():
print(item)
-# x-ms-original-file: 2025-11-01/RetentionPolicies_ListByScheduler_MaximumSet_Gen.json
+# x-ms-original-file: 2026-02-01/RetentionPolicies_ListByScheduler_MaximumSet_Gen.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_update_maximum_set_gen.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_update_maximum_set_gen.py
index e3bc8113e9ab..6ee0acf008a8 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_update_maximum_set_gen.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/retention_policies_update_maximum_set_gen.py
@@ -46,6 +46,6 @@ def main():
print(response)
-# x-ms-original-file: 2025-11-01/RetentionPolicies_Update_MaximumSet_Gen.json
+# x-ms-original-file: 2026-02-01/RetentionPolicies_Update_MaximumSet_Gen.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_create_or_update.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_create_or_update.py
index d8e910a8906e..beb891aebcbe 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_create_or_update.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_create_or_update.py
@@ -42,6 +42,6 @@ def main():
print(response)
-# x-ms-original-file: 2025-11-01/Schedulers_CreateOrUpdate.json
+# x-ms-original-file: 2026-02-01/Schedulers_CreateOrUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_delete.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_delete.py
index c3d3b364d7cb..97e88de7bc3a 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_delete.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_delete.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: 2025-11-01/Schedulers_Delete.json
+# x-ms-original-file: 2026-02-01/Schedulers_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_get.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_get.py
index 2eb510bfc40b..984e5c0f16ea 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_get.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_get.py
@@ -37,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: 2025-11-01/Schedulers_Get.json
+# x-ms-original-file: 2026-02-01/Schedulers_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_list_by_resource_group.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_list_by_resource_group.py
index 69506b78ae17..f9ebb33fc3e5 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_list_by_resource_group.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_list_by_resource_group.py
@@ -37,6 +37,6 @@ def main():
print(item)
-# x-ms-original-file: 2025-11-01/Schedulers_ListByResourceGroup.json
+# x-ms-original-file: 2026-02-01/Schedulers_ListByResourceGroup.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_list_by_subscription.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_list_by_subscription.py
index fa546a299556..13415de76658 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_list_by_subscription.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_list_by_subscription.py
@@ -35,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: 2025-11-01/Schedulers_ListBySubscription.json
+# x-ms-original-file: 2026-02-01/Schedulers_ListBySubscription.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_update.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_update.py
index f0353e1342a1..2338bcbe2e79 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_update.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/schedulers_update.py
@@ -41,6 +41,6 @@ def main():
print(response)
-# x-ms-original-file: 2025-11-01/Schedulers_Update.json
+# x-ms-original-file: 2026-02-01/Schedulers_Update.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_create_or_update.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_create_or_update.py
index 6bea6591f2c8..d0109c2be9e9 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_create_or_update.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_create_or_update.py
@@ -39,6 +39,6 @@ def main():
print(response)
-# x-ms-original-file: 2025-11-01/TaskHubs_CreateOrUpdate.json
+# x-ms-original-file: 2026-02-01/TaskHubs_CreateOrUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_delete.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_delete.py
index 7a190c4b1f4c..2aad219c26bc 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_delete.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_delete.py
@@ -37,6 +37,6 @@ def main():
).result()
-# x-ms-original-file: 2025-11-01/TaskHubs_Delete.json
+# x-ms-original-file: 2026-02-01/TaskHubs_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_get.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_get.py
index e6e200c18746..117a8fec166a 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_get.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_get.py
@@ -38,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: 2025-11-01/TaskHubs_Get.json
+# x-ms-original-file: 2026-02-01/TaskHubs_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_list_by_scheduler.py b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_list_by_scheduler.py
index 3f5a9333a54a..3c72c31ad9ce 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_list_by_scheduler.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_samples/task_hubs_list_by_scheduler.py
@@ -38,6 +38,6 @@ def main():
print(item)
-# x-ms-original-file: 2025-11-01/TaskHubs_ListByScheduler.json
+# x-ms-original-file: 2026-02-01/TaskHubs_ListByScheduler.json
if __name__ == "__main__":
main()
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_tests/test_durable_task_mgmt_schedulers_operations.py b/sdk/durabletask/azure-mgmt-durabletask/generated_tests/test_durable_task_mgmt_schedulers_operations.py
index 004fca6fa30e..81a9af59bc7c 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_tests/test_durable_task_mgmt_schedulers_operations.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_tests/test_durable_task_mgmt_schedulers_operations.py
@@ -43,7 +43,33 @@ def test_schedulers_begin_create_or_update(self, resource_group):
"ipAllowlist": ["str"],
"sku": {"name": "str", "capacity": 0, "redundancyState": "str"},
"endpoint": "str",
+ "privateEndpointConnections": [
+ {
+ "id": "str",
+ "name": "str",
+ "properties": {
+ "privateLinkServiceConnectionState": {
+ "actionsRequired": "str",
+ "description": "str",
+ "status": "str",
+ },
+ "groupIds": ["str"],
+ "privateEndpoint": {"id": "str"},
+ "provisioningState": "str",
+ },
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ }
+ ],
"provisioningState": "str",
+ "publicNetworkAccess": "str",
},
"systemData": {
"createdAt": "2020-02-20 00:00:00",
@@ -72,6 +98,7 @@ def test_schedulers_begin_update(self, resource_group):
"endpoint": "str",
"ipAllowlist": ["str"],
"provisioningState": "str",
+ "publicNetworkAccess": "str",
"sku": {"capacity": 0, "name": "str", "redundancyState": "str"},
},
"tags": {"str": "str"},
@@ -109,3 +136,118 @@ def test_schedulers_list_by_subscription(self, resource_group):
result = [r for r in response]
# please add some check logic here by yourself
# ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_schedulers_get_private_link(self, resource_group):
+ response = self.client.schedulers.get_private_link(
+ resource_group_name=resource_group.name,
+ scheduler_name="str",
+ private_link_resource_name="str",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_schedulers_list_private_links(self, resource_group):
+ response = self.client.schedulers.list_private_links(
+ resource_group_name=resource_group.name,
+ scheduler_name="str",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_schedulers_get_private_endpoint_connection(self, resource_group):
+ response = self.client.schedulers.get_private_endpoint_connection(
+ resource_group_name=resource_group.name,
+ scheduler_name="str",
+ private_endpoint_connection_name="str",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_schedulers_begin_create_or_update_private_endpoint_connection(self, resource_group):
+ response = self.client.schedulers.begin_create_or_update_private_endpoint_connection(
+ resource_group_name=resource_group.name,
+ scheduler_name="str",
+ private_endpoint_connection_name="str",
+ resource={
+ "id": "str",
+ "name": "str",
+ "properties": {
+ "privateLinkServiceConnectionState": {
+ "actionsRequired": "str",
+ "description": "str",
+ "status": "str",
+ },
+ "groupIds": ["str"],
+ "privateEndpoint": {"id": "str"},
+ "provisioningState": "str",
+ },
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_schedulers_begin_update_private_endpoint_connection(self, resource_group):
+ response = self.client.schedulers.begin_update_private_endpoint_connection(
+ resource_group_name=resource_group.name,
+ scheduler_name="str",
+ private_endpoint_connection_name="str",
+ properties={
+ "properties": {
+ "privateEndpoint": {"id": "str"},
+ "privateLinkServiceConnectionState": {
+ "actionsRequired": "str",
+ "description": "str",
+ "status": "str",
+ },
+ }
+ },
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_schedulers_begin_delete_private_endpoint_connection(self, resource_group):
+ response = self.client.schedulers.begin_delete_private_endpoint_connection(
+ resource_group_name=resource_group.name,
+ scheduler_name="str",
+ private_endpoint_connection_name="str",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_schedulers_list_private_endpoint_connections(self, resource_group):
+ response = self.client.schedulers.list_private_endpoint_connections(
+ resource_group_name=resource_group.name,
+ scheduler_name="str",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/durabletask/azure-mgmt-durabletask/generated_tests/test_durable_task_mgmt_schedulers_operations_async.py b/sdk/durabletask/azure-mgmt-durabletask/generated_tests/test_durable_task_mgmt_schedulers_operations_async.py
index e79c02401cc1..e6418815bf3e 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/generated_tests/test_durable_task_mgmt_schedulers_operations_async.py
+++ b/sdk/durabletask/azure-mgmt-durabletask/generated_tests/test_durable_task_mgmt_schedulers_operations_async.py
@@ -45,7 +45,33 @@ async def test_schedulers_begin_create_or_update(self, resource_group):
"ipAllowlist": ["str"],
"sku": {"name": "str", "capacity": 0, "redundancyState": "str"},
"endpoint": "str",
+ "privateEndpointConnections": [
+ {
+ "id": "str",
+ "name": "str",
+ "properties": {
+ "privateLinkServiceConnectionState": {
+ "actionsRequired": "str",
+ "description": "str",
+ "status": "str",
+ },
+ "groupIds": ["str"],
+ "privateEndpoint": {"id": "str"},
+ "provisioningState": "str",
+ },
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ }
+ ],
"provisioningState": "str",
+ "publicNetworkAccess": "str",
},
"systemData": {
"createdAt": "2020-02-20 00:00:00",
@@ -76,6 +102,7 @@ async def test_schedulers_begin_update(self, resource_group):
"endpoint": "str",
"ipAllowlist": ["str"],
"provisioningState": "str",
+ "publicNetworkAccess": "str",
"sku": {"capacity": 0, "name": "str", "redundancyState": "str"},
},
"tags": {"str": "str"},
@@ -116,3 +143,124 @@ async def test_schedulers_list_by_subscription(self, resource_group):
result = [r async for r in response]
# please add some check logic here by yourself
# ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_schedulers_get_private_link(self, resource_group):
+ response = await self.client.schedulers.get_private_link(
+ resource_group_name=resource_group.name,
+ scheduler_name="str",
+ private_link_resource_name="str",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_schedulers_list_private_links(self, resource_group):
+ response = self.client.schedulers.list_private_links(
+ resource_group_name=resource_group.name,
+ scheduler_name="str",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_schedulers_get_private_endpoint_connection(self, resource_group):
+ response = await self.client.schedulers.get_private_endpoint_connection(
+ resource_group_name=resource_group.name,
+ scheduler_name="str",
+ private_endpoint_connection_name="str",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_schedulers_begin_create_or_update_private_endpoint_connection(self, resource_group):
+ response = await (
+ await self.client.schedulers.begin_create_or_update_private_endpoint_connection(
+ resource_group_name=resource_group.name,
+ scheduler_name="str",
+ private_endpoint_connection_name="str",
+ resource={
+ "id": "str",
+ "name": "str",
+ "properties": {
+ "privateLinkServiceConnectionState": {
+ "actionsRequired": "str",
+ "description": "str",
+ "status": "str",
+ },
+ "groupIds": ["str"],
+ "privateEndpoint": {"id": "str"},
+ "provisioningState": "str",
+ },
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_schedulers_begin_update_private_endpoint_connection(self, resource_group):
+ response = await (
+ await self.client.schedulers.begin_update_private_endpoint_connection(
+ resource_group_name=resource_group.name,
+ scheduler_name="str",
+ private_endpoint_connection_name="str",
+ properties={
+ "properties": {
+ "privateEndpoint": {"id": "str"},
+ "privateLinkServiceConnectionState": {
+ "actionsRequired": "str",
+ "description": "str",
+ "status": "str",
+ },
+ }
+ },
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_schedulers_begin_delete_private_endpoint_connection(self, resource_group):
+ response = await (
+ await self.client.schedulers.begin_delete_private_endpoint_connection(
+ resource_group_name=resource_group.name,
+ scheduler_name="str",
+ private_endpoint_connection_name="str",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_schedulers_list_private_endpoint_connections(self, resource_group):
+ response = self.client.schedulers.list_private_endpoint_connections(
+ resource_group_name=resource_group.name,
+ scheduler_name="str",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/durabletask/azure-mgmt-durabletask/pyproject.toml b/sdk/durabletask/azure-mgmt-durabletask/pyproject.toml
index b74103e24803..1f7687cf95b6 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/pyproject.toml
+++ b/sdk/durabletask/azure-mgmt-durabletask/pyproject.toml
@@ -54,8 +54,8 @@ content-type = "text/markdown"
[tool.setuptools.packages.find]
exclude = [
"tests*",
- "samples*",
"generated_tests*",
+ "samples*",
"generated_samples*",
"doc*",
"azure",
diff --git a/sdk/durabletask/azure-mgmt-durabletask/tsp-location.yaml b/sdk/durabletask/azure-mgmt-durabletask/tsp-location.yaml
index bb6fc99f5ae3..88b0416152cb 100644
--- a/sdk/durabletask/azure-mgmt-durabletask/tsp-location.yaml
+++ b/sdk/durabletask/azure-mgmt-durabletask/tsp-location.yaml
@@ -1,4 +1,4 @@
directory: specification/durabletask/DurableTask.Management
-commit: 688609bf18c3978794a0c5ca31557c286c623b44
+commit: a86317f00c71ef51aab7f78e6f4a2be3cf58f2cd
repo: Azure/azure-rest-api-specs
additionalDirectories: