diff --git a/sdk/keyvault/azure-keyvault-secrets/CHANGELOG.md b/sdk/keyvault/azure-keyvault-secrets/CHANGELOG.md
index be470f9ef2c6..6b791150e960 100644
--- a/sdk/keyvault/azure-keyvault-secrets/CHANGELOG.md
+++ b/sdk/keyvault/azure-keyvault-secrets/CHANGELOG.md
@@ -1,5 +1,9 @@
# Release History
+## 4.11.0 (2026-03-06)
+
+Please add a changelog entry for this release manually; automatic changelog generation is skipped for data-plane packages.
+
## 4.10.1 (Unreleased)
### Features Added
diff --git a/sdk/keyvault/azure-keyvault-secrets/MANIFEST.in b/sdk/keyvault/azure-keyvault-secrets/MANIFEST.in
index 91c95d391763..ff7a325b7774 100644
--- a/sdk/keyvault/azure-keyvault-secrets/MANIFEST.in
+++ b/sdk/keyvault/azure-keyvault-secrets/MANIFEST.in
@@ -1,7 +1,8 @@
include *.md
include LICENSE
-include azure/keyvault/secrets/py.typed
+include azure/keyvault/secrets/_generated/py.typed
recursive-include tests *.py
recursive-include samples *.py *.md
include azure/__init__.py
include azure/keyvault/__init__.py
+include azure/keyvault/secrets/__init__.py
diff --git a/sdk/keyvault/azure-keyvault-secrets/_metadata.json b/sdk/keyvault/azure-keyvault-secrets/_metadata.json
new file mode 100644
index 000000000000..27bbff5cbcd6
--- /dev/null
+++ b/sdk/keyvault/azure-keyvault-secrets/_metadata.json
@@ -0,0 +1,10 @@
+{
+ "apiVersion": "2025-07-01",
+ "apiVersions": {
+ "KeyVault": "2025-07-01"
+ },
+ "commit": "74cc90c49189a079b3cc93fde9c9ad76742f0184",
+ "repository_url": "https://github.com/Azure/azure-rest-api-specs",
+ "typespec_src": "specification/keyvault/Security.KeyVault.Secrets",
+ "emitterVersion": "0.60.2"
+}
\ No newline at end of file
diff --git a/sdk/keyvault/azure-keyvault-secrets/apiview-properties.json b/sdk/keyvault/azure-keyvault-secrets/apiview-properties.json
new file mode 100644
index 000000000000..d3c0b4781300
--- /dev/null
+++ b/sdk/keyvault/azure-keyvault-secrets/apiview-properties.json
@@ -0,0 +1,42 @@
+{
+ "CrossLanguagePackageId": "KeyVault",
+ "CrossLanguageDefinitionId": {
+ "azure.keyvault.secrets._generated.models.BackupSecretResult": "KeyVault.BackupSecretResult",
+ "azure.keyvault.secrets._generated.models.DeletedSecretBundle": "KeyVault.DeletedSecretBundle",
+ "azure.keyvault.secrets._generated.models.DeletedSecretItem": "KeyVault.DeletedSecretItem",
+ "azure.keyvault.secrets._generated.models.KeyVaultError": "KeyVaultError",
+ "azure.keyvault.secrets._generated.models.KeyVaultErrorError": "KeyVaultError.error.anonymous",
+ "azure.keyvault.secrets._generated.models.SecretAttributes": "KeyVault.SecretAttributes",
+ "azure.keyvault.secrets._generated.models.SecretBundle": "KeyVault.SecretBundle",
+ "azure.keyvault.secrets._generated.models.SecretItem": "KeyVault.SecretItem",
+ "azure.keyvault.secrets._generated.models.SecretRestoreParameters": "KeyVault.SecretRestoreParameters",
+ "azure.keyvault.secrets._generated.models.SecretSetParameters": "KeyVault.SecretSetParameters",
+ "azure.keyvault.secrets._generated.models.SecretUpdateParameters": "KeyVault.SecretUpdateParameters",
+ "azure.keyvault.secrets._generated.models.DeletionRecoveryLevel": "KeyVault.DeletionRecoveryLevel",
+ "azure.keyvault.secrets._generated.models.ContentType": "KeyVault.ContentType",
+ "azure.keyvault.secrets._generated.KeyVaultClient.set_secret": "KeyVault.setSecret",
+ "azure.keyvault.secrets._generated.aio.KeyVaultClient.set_secret": "KeyVault.setSecret",
+ "azure.keyvault.secrets._generated.KeyVaultClient.delete_secret": "KeyVault.deleteSecret",
+ "azure.keyvault.secrets._generated.aio.KeyVaultClient.delete_secret": "KeyVault.deleteSecret",
+ "azure.keyvault.secrets._generated.KeyVaultClient.update_secret": "KeyVault.updateSecret",
+ "azure.keyvault.secrets._generated.aio.KeyVaultClient.update_secret": "KeyVault.updateSecret",
+ "azure.keyvault.secrets._generated.KeyVaultClient.get_secret": "KeyVault.getSecret",
+ "azure.keyvault.secrets._generated.aio.KeyVaultClient.get_secret": "KeyVault.getSecret",
+ "azure.keyvault.secrets._generated.KeyVaultClient.get_secrets": "KeyVault.getSecrets",
+ "azure.keyvault.secrets._generated.aio.KeyVaultClient.get_secrets": "KeyVault.getSecrets",
+ "azure.keyvault.secrets._generated.KeyVaultClient.get_secret_versions": "KeyVault.getSecretVersions",
+ "azure.keyvault.secrets._generated.aio.KeyVaultClient.get_secret_versions": "KeyVault.getSecretVersions",
+ "azure.keyvault.secrets._generated.KeyVaultClient.get_deleted_secrets": "KeyVault.getDeletedSecrets",
+ "azure.keyvault.secrets._generated.aio.KeyVaultClient.get_deleted_secrets": "KeyVault.getDeletedSecrets",
+ "azure.keyvault.secrets._generated.KeyVaultClient.get_deleted_secret": "KeyVault.getDeletedSecret",
+ "azure.keyvault.secrets._generated.aio.KeyVaultClient.get_deleted_secret": "KeyVault.getDeletedSecret",
+ "azure.keyvault.secrets._generated.KeyVaultClient.purge_deleted_secret": "KeyVault.purgeDeletedSecret",
+ "azure.keyvault.secrets._generated.aio.KeyVaultClient.purge_deleted_secret": "KeyVault.purgeDeletedSecret",
+ "azure.keyvault.secrets._generated.KeyVaultClient.recover_deleted_secret": "KeyVault.recoverDeletedSecret",
+ "azure.keyvault.secrets._generated.aio.KeyVaultClient.recover_deleted_secret": "KeyVault.recoverDeletedSecret",
+ "azure.keyvault.secrets._generated.KeyVaultClient.backup_secret": "KeyVault.backupSecret",
+ "azure.keyvault.secrets._generated.aio.KeyVaultClient.backup_secret": "KeyVault.backupSecret",
+ "azure.keyvault.secrets._generated.KeyVaultClient.restore_secret": "KeyVault.restoreSecret",
+ "azure.keyvault.secrets._generated.aio.KeyVaultClient.restore_secret": "KeyVault.restoreSecret"
+ }
+}
\ No newline at end of file
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/__init__.py b/sdk/keyvault/azure-keyvault-secrets/azure/__init__.py
index 125860bac907..d55ccad1f573 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/__init__.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/__init__.py
@@ -1,6 +1 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-# pylint:disable=missing-docstring
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/__init__.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/__init__.py
index 125860bac907..d55ccad1f573 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/__init__.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/__init__.py
@@ -1,6 +1 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-# pylint:disable=missing-docstring
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/__init__.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/__init__.py
index ec1b5aaa0651..d55ccad1f573 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/__init__.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/__init__.py
@@ -1,19 +1 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-from ._models import DeletedSecret, KeyVaultSecret, KeyVaultSecretIdentifier, SecretProperties
-from ._shared.client_base import ApiVersion
-from ._client import SecretClient
-
-__all__ = [
- "ApiVersion",
- "SecretClient",
- "KeyVaultSecret",
- "KeyVaultSecretIdentifier",
- "SecretProperties",
- "DeletedSecret"
-]
-
-from ._version import VERSION
-__version__ = VERSION
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_client.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_client.py
deleted file mode 100644
index 7a64848801dd..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_client.py
+++ /dev/null
@@ -1,478 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-from datetime import datetime
-from functools import partial
-from typing import Any, cast, Dict, Optional
-
-from azure.core.paging import ItemPaged
-from azure.core.polling import LROPoller
-from azure.core.tracing.decorator import distributed_trace
-
-from ._models import KeyVaultSecret, DeletedSecret, SecretProperties
-from ._shared import KeyVaultClientBase
-from ._shared._polling import DeleteRecoverPollingMethod, KeyVaultOperationPoller
-
-
-class SecretClient(KeyVaultClientBase):
- """A high-level interface for managing a vault's secrets.
-
- :param str vault_url: URL of the vault the client will access. This is also called the vault's "DNS Name".
- You should validate that this URL references a valid Key Vault resource. See https://aka.ms/azsdk/blog/vault-uri
- for details.
- :param credential: An object which can provide an access token for the vault, such as a credential from
- :mod:`azure.identity`
- :type credential: ~azure.core.credentials.TokenCredential
-
- :keyword api_version: Version of the service API to use. Defaults to the most recent.
- :paramtype api_version: ~azure.keyvault.secrets.ApiVersion or str
- :keyword bool verify_challenge_resource: Whether to verify the authentication challenge resource matches the Key
- Vault domain. Defaults to True.
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets.py
- :start-after: [START create_secret_client]
- :end-before: [END create_secret_client]
- :language: python
- :caption: Create a new ``SecretClient``
- :dedent: 4
- """
-
- # pylint:disable=protected-access
-
- @distributed_trace
- def get_secret(self, name: str, version: Optional[str] = None, **kwargs: Any) -> KeyVaultSecret:
- """Get a secret. Requires the secrets/get permission.
-
- :param str name: The name of the secret
- :param str version: (optional) Version of the secret to get. If unspecified, gets the latest version.
-
- :returns: The fetched secret.
- :rtype: ~azure.keyvault.secrets.KeyVaultSecret
-
- :raises ~azure.core.exceptions.ResourceNotFoundError or ~azure.core.exceptions.HttpResponseError:
- the former if the secret doesn't exist; the latter for other errors
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets.py
- :start-after: [START get_secret]
- :end-before: [END get_secret]
- :language: python
- :caption: Get a secret
- :dedent: 8
- """
- bundle = self._client.get_secret(
- secret_name=name,
- secret_version=version or "",
- **kwargs
- )
- return KeyVaultSecret._from_secret_bundle(bundle)
-
- @distributed_trace
- def set_secret(
- self,
- name: str,
- value: str,
- *,
- enabled: Optional[bool] = None,
- tags: Optional[Dict[str, str]] = None,
- content_type: Optional[str] = None,
- not_before: Optional[datetime] = None,
- expires_on: Optional[datetime] = None,
- **kwargs: Any,
- ) -> KeyVaultSecret:
- """Set a secret value. If `name` is in use, create a new version of the secret. If not, create a new secret.
-
- Requires secrets/set permission.
-
- :param str name: The name of the secret
- :param str value: The value of the secret
-
- :keyword bool enabled: Whether the secret is enabled for use.
- :keyword tags: Application specific metadata in the form of key-value pairs.
- :paramtype tags: Dict[str, str] or None
- :keyword str content_type: An arbitrary string indicating the type of the secret, e.g. 'password'
- :keyword ~datetime.datetime not_before: Not before date of the secret in UTC
- :keyword ~datetime.datetime expires_on: Expiry date of the secret in UTC
-
- :returns: The created or updated secret.
- :rtype: ~azure.keyvault.secrets.KeyVaultSecret
-
- :raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets.py
- :start-after: [START set_secret]
- :end-before: [END set_secret]
- :language: python
- :caption: Set a secret's value
- :dedent: 8
-
- """
- if enabled is not None or not_before is not None or expires_on is not None:
- attributes = self._models.SecretAttributes(
- enabled=enabled, not_before=not_before, expires=expires_on
- )
- else:
- attributes = None
-
- parameters = self._models.SecretSetParameters(
- value=value,
- tags=tags,
- content_type=content_type,
- secret_attributes=attributes
- )
-
- bundle = self._client.set_secret(
- secret_name=name,
- parameters=parameters,
- **kwargs
- )
- return KeyVaultSecret._from_secret_bundle(bundle)
-
- @distributed_trace
- def update_secret_properties(
- self,
- name: str,
- version: Optional[str] = None,
- *,
- enabled: Optional[bool] = None,
- tags: Optional[Dict[str, str]] = None,
- content_type: Optional[str] = None,
- not_before: Optional[datetime] = None,
- expires_on: Optional[datetime] = None,
- **kwargs: Any,
- ) -> SecretProperties:
- """Update properties of a secret other than its value. Requires secrets/set permission.
-
- This method updates properties of the secret, such as whether it's enabled, but can't change the secret's
- value. Use :func:`set_secret` to change the secret's value.
-
- :param str name: Name of the secret
- :param str version: (optional) Version of the secret to update. If unspecified, the latest version is updated.
-
- :keyword bool enabled: Whether the secret is enabled for use.
- :keyword tags: Application specific metadata in the form of key-value pairs.
- :paramtype tags: Dict[str, str] or None
- :keyword str content_type: An arbitrary string indicating the type of the secret, e.g. 'password'
- :keyword ~datetime.datetime not_before: Not before date of the secret in UTC
- :keyword ~datetime.datetime expires_on: Expiry date of the secret in UTC
-
- :returns: The updated secret properties.
- :rtype: ~azure.keyvault.secrets.SecretProperties
-
- :raises ~azure.core.exceptions.ResourceNotFoundError or ~azure.core.exceptions.HttpResponseError:
- the former if the secret doesn't exist; the latter for other errors
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets.py
- :start-after: [START update_secret]
- :end-before: [END update_secret]
- :language: python
- :caption: Update a secret's attributes
- :dedent: 8
-
- """
- if enabled is not None or not_before is not None or expires_on is not None:
- attributes = self._models.SecretAttributes(
- enabled=enabled, not_before=not_before, expires=expires_on
- )
- else:
- attributes = None
-
- parameters = self._models.SecretUpdateParameters(
- content_type=content_type,
- secret_attributes=attributes,
- tags=tags,
- )
-
- bundle = self._client.update_secret(
- name,
- secret_version=version or "",
- parameters=parameters,
- **kwargs
- )
- return SecretProperties._from_secret_bundle(bundle) # pylint: disable=protected-access
-
- @distributed_trace
- def list_properties_of_secrets(self, **kwargs: Any) -> ItemPaged[SecretProperties]:
- """List identifiers and attributes of all secrets in the vault. Requires secrets/list permission.
-
- List items don't include secret values. Use :func:`get_secret` to get a secret's value.
-
- :returns: An iterator of secrets, excluding their values
- :rtype: ~azure.core.paging.ItemPaged[~azure.keyvault.secrets.SecretProperties]
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets.py
- :start-after: [START list_secrets]
- :end-before: [END list_secrets]
- :language: python
- :caption: List all secrets
- :dedent: 8
-
- """
- return self._client.get_secrets(
- maxresults=kwargs.pop("max_page_size", None),
- cls=lambda objs: [SecretProperties._from_secret_item(x) for x in objs],
- **kwargs
- )
-
- @distributed_trace
- def list_properties_of_secret_versions(self, name: str, **kwargs: Any) -> ItemPaged[SecretProperties]:
- """List properties of all versions of a secret, excluding their values. Requires secrets/list permission.
-
- List items don't include secret values. Use :func:`get_secret` to get a secret's value.
-
- :param str name: Name of the secret
-
- :returns: An iterator of secrets, excluding their values
- :rtype: ~azure.core.paging.ItemPaged[~azure.keyvault.secrets.SecretProperties]
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets.py
- :start-after: [START list_properties_of_secret_versions]
- :end-before: [END list_properties_of_secret_versions]
- :language: python
- :caption: List all versions of a secret
- :dedent: 8
-
- """
- return self._client.get_secret_versions(
- name,
- maxresults=kwargs.pop("max_page_size", None),
- cls=lambda objs: [SecretProperties._from_secret_item(x) for x in objs],
- **kwargs
- )
-
- @distributed_trace
- def backup_secret(self, name: str, **kwargs: Any) -> bytes:
- """Back up a secret in a protected form useable only by Azure Key Vault. Requires secrets/backup permission.
-
- :param str name: Name of the secret to back up
-
- :returns: The backup result, in a protected bytes format that can only be used by Azure Key Vault.
- :rtype: bytes
-
- :raises ~azure.core.exceptions.ResourceNotFoundError or ~azure.core.exceptions.HttpResponseError:
- the former if the secret doesn't exist; the latter for other errors
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets.py
- :start-after: [START backup_secret]
- :end-before: [END backup_secret]
- :language: python
- :caption: Back up a secret
- :dedent: 8
-
- """
- backup_result = self._client.backup_secret(name, **kwargs)
- return cast(bytes, backup_result.value)
-
- @distributed_trace
- def restore_secret_backup(self, backup: bytes, **kwargs: Any) -> SecretProperties:
- """Restore a backed up secret. Requires the secrets/restore permission.
-
- :param bytes backup: A secret backup as returned by :func:`backup_secret`
-
- :returns: The restored secret
- :rtype: ~azure.keyvault.secrets.SecretProperties
-
- :raises ~azure.core.exceptions.ResourceExistsError or ~azure.core.exceptions.HttpResponseError:
- the former if the secret's name is already in use; the latter for other errors
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets.py
- :start-after: [START restore_secret_backup]
- :end-before: [END restore_secret_backup]
- :language: python
- :caption: Restore a backed up secret
- :dedent: 8
-
- """
- bundle = self._client.restore_secret(
- parameters=self._models.SecretRestoreParameters(secret_bundle_backup=backup),
- **kwargs
- )
- return SecretProperties._from_secret_bundle(bundle)
-
- @distributed_trace
- def begin_delete_secret(self, name: str, **kwargs: Any) -> LROPoller[DeletedSecret]: # pylint:disable=bad-option-value,delete-operation-wrong-return-type
- """Delete all versions of a secret. Requires secrets/delete permission.
-
- When this method returns Key Vault has begun deleting the secret. Deletion may take several seconds in a vault
- with soft-delete enabled. This method therefore returns a poller enabling you to wait for deletion to complete.
-
- :param str name: Name of the secret to delete.
-
- :returns: A poller for the delete operation. The poller's `result` method returns the
- :class:`~azure.keyvault.secrets.DeletedSecret` without waiting for deletion to complete. If the vault has
- soft-delete enabled and you want to permanently delete the secret with :func:`purge_deleted_secret`, call
- the poller's `wait` method first. It will block until the deletion is complete. The `wait` method requires
- secrets/get permission.
- :rtype: ~azure.core.polling.LROPoller[~azure.keyvault.secrets.DeletedSecret]
-
- :raises ~azure.core.exceptions.ResourceNotFoundError or ~azure.core.exceptions.HttpResponseError:
- the former if the secret doesn't exist; the latter for other errors
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets.py
- :start-after: [START delete_secret]
- :end-before: [END delete_secret]
- :language: python
- :caption: Delete a secret
- :dedent: 8
-
- """
- polling_interval = kwargs.pop("_polling_interval", None)
- if polling_interval is None:
- polling_interval = 2
- # Ignore pyright warning about return type not being iterable because we use `cls` to return a tuple
- pipeline_response, deleted_secret_bundle = self._client.delete_secret(
- secret_name=name,
- cls=lambda pipeline_response, deserialized, _: (pipeline_response, deserialized),
- **kwargs,
- ) # pyright: ignore[reportGeneralTypeIssues]
- deleted_secret = DeletedSecret._from_deleted_secret_bundle(deleted_secret_bundle)
-
- command = partial(self.get_deleted_secret, name=name, **kwargs)
- polling_method = DeleteRecoverPollingMethod(
- # no recovery ID means soft-delete is disabled, in which case we initialize the poller as finished
- finished=deleted_secret.recovery_id is None,
- pipeline_response=pipeline_response,
- command=command,
- final_resource=deleted_secret,
- interval=polling_interval,
- )
- return KeyVaultOperationPoller(polling_method)
-
- @distributed_trace
- def get_deleted_secret(self, name: str, **kwargs: Any) -> DeletedSecret:
- """Get a deleted secret. Possible only in vaults with soft-delete enabled. Requires secrets/get permission.
-
- :param str name: Name of the deleted secret
-
- :returns: The deleted secret.
- :rtype: ~azure.keyvault.secrets.DeletedSecret
-
- :raises ~azure.core.exceptions.ResourceNotFoundError or ~azure.core.exceptions.HttpResponseError:
- the former if the deleted secret doesn't exist; the latter for other errors
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets.py
- :start-after: [START get_deleted_secret]
- :end-before: [END get_deleted_secret]
- :language: python
- :caption: Get a deleted secret
- :dedent: 8
-
- """
- bundle = self._client.get_deleted_secret(name, **kwargs)
- return DeletedSecret._from_deleted_secret_bundle(bundle)
-
- @distributed_trace
- def list_deleted_secrets(self, **kwargs: Any) -> ItemPaged[DeletedSecret]:
- """Lists all deleted secrets. Possible only in vaults with soft-delete enabled.
-
- Requires secrets/list permission.
-
- :returns: An iterator of deleted secrets, excluding their values
- :rtype: ~azure.core.paging.ItemPaged[~azure.keyvault.secrets.DeletedSecret]
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets.py
- :start-after: [START list_deleted_secrets]
- :end-before: [END list_deleted_secrets]
- :language: python
- :caption: List deleted secrets
- :dedent: 8
-
- """
- return self._client.get_deleted_secrets(
- maxresults=kwargs.pop("max_page_size", None),
- cls=lambda objs: [DeletedSecret._from_deleted_secret_item(x) for x in objs],
- **kwargs
- )
-
- @distributed_trace
- def purge_deleted_secret(self, name: str, **kwargs: Any) -> None:
- """Permanently deletes a deleted secret. Possible only in vaults with soft-delete enabled.
-
- Performs an irreversible deletion of the specified secret, without possibility for recovery. The operation is
- not available if the :py:attr:`~azure.keyvault.secrets.SecretProperties.recovery_level` does not specify
- 'Purgeable'. This method is only necessary for purging a secret before its
- :py:attr:`~azure.keyvault.secrets.DeletedSecret.scheduled_purge_date`.
-
- Requires secrets/purge permission.
-
- :param str name: Name of the deleted secret to purge
-
- :returns: None
-
- :raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # if the vault has soft-delete enabled, purge permanently deletes the secret
- # (with soft-delete disabled, begin_delete_secret is permanent)
- secret_client.purge_deleted_secret("secret-name")
-
- """
- self._client.purge_deleted_secret(name, **kwargs)
-
- @distributed_trace
- def begin_recover_deleted_secret(self, name: str, **kwargs: Any) -> LROPoller[SecretProperties]:
- """Recover a deleted secret to its latest version. Possible only in a vault with soft-delete enabled.
-
- Requires the secrets/recover permission. If the vault does not have soft-delete enabled,
- :func:`begin_delete_secret` is permanent, and this method will return an error. Attempting to recover a
- non-deleted secret will also return an error. When this method returns Key Vault has begun recovering the
- secret. Recovery may take several seconds. This method therefore returns a poller enabling you to wait for
- recovery to complete. Waiting is only necessary when you want to use the recovered secret in another operation
- immediately.
-
- :param str name: Name of the deleted secret to recover
-
- :returns: A poller for the recovery operation. The poller's `result` method returns the recovered secret's
- :class:`~azure.keyvault.secrets.SecretProperties` without waiting for recovery to complete. If you want to
- use the recovered secret immediately, call the poller's `wait` method, which blocks until the secret is
- ready to use. The `wait` method requires secrets/get permission.
- :rtype: ~azure.core.polling.LROPoller[~azure.keyvault.secrets.SecretProperties]
-
- :raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets.py
- :start-after: [START recover_deleted_secret]
- :end-before: [END recover_deleted_secret]
- :language: python
- :caption: Recover a deleted secret
- :dedent: 8
-
- """
- polling_interval = kwargs.pop("_polling_interval", None)
- if polling_interval is None:
- polling_interval = 2
- # Ignore pyright warning about return type not being iterable because we use `cls` to return a tuple
- pipeline_response, recovered_secret_bundle = self._client.recover_deleted_secret(
- secret_name=name,
- cls=lambda pipeline_response, deserialized, _: (pipeline_response, deserialized),
- **kwargs,
- ) # pyright: ignore[reportGeneralTypeIssues]
- recovered_secret = SecretProperties._from_secret_bundle(recovered_secret_bundle)
-
- command = partial(self.get_secret, name=name, **kwargs)
- polling_method = DeleteRecoverPollingMethod(
- finished=False,
- pipeline_response=pipeline_response,
- command=command,
- final_resource=recovered_secret,
- interval=polling_interval,
- )
- return KeyVaultOperationPoller(polling_method)
-
- def __enter__(self) -> "SecretClient":
- self._client.__enter__()
- return self
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_client.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_client.py
index affcf5d228d3..814ac7783e86 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_client.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_client.py
@@ -15,14 +15,14 @@
from azure.core.rest import HttpRequest, HttpResponse
from ._configuration import KeyVaultClientConfiguration
-from ._operations import KeyVaultClientOperationsMixin
+from ._operations import _KeyVaultClientOperationsMixin
from ._utils.serialization import Deserializer, Serializer
if TYPE_CHECKING:
from azure.core.credentials import TokenCredential
-class KeyVaultClient(KeyVaultClientOperationsMixin):
+class KeyVaultClient(_KeyVaultClientOperationsMixin):
"""The key vault client performs cryptographic key operations and vault operations against the Key
Vault service.
@@ -30,8 +30,9 @@ class KeyVaultClient(KeyVaultClientOperationsMixin):
:type vault_base_url: str
:param credential: Credential used to authenticate requests to the service. Required.
:type credential: ~azure.core.credentials.TokenCredential
- :keyword api_version: The API version to use for this operation. Default value is "7.6". Note
- that overriding this default value may result in unsupported behavior.
+ :keyword api_version: The API version to use for this operation. Known values are "2025-07-01"
+ and None. Default value is "2025-07-01". Note that overriding this default value may result in
+ unsupported behavior.
:paramtype api_version: str
"""
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_configuration.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_configuration.py
index 27abab8a46ce..5d08b51b8e7c 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_configuration.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_configuration.py
@@ -26,13 +26,14 @@ class KeyVaultClientConfiguration: # pylint: disable=too-many-instance-attribut
:type vault_base_url: str
:param credential: Credential used to authenticate requests to the service. Required.
:type credential: ~azure.core.credentials.TokenCredential
- :keyword api_version: The API version to use for this operation. Default value is "7.6". Note
- that overriding this default value may result in unsupported behavior.
+ :keyword api_version: The API version to use for this operation. Known values are "2025-07-01"
+ and None. Default value is "2025-07-01". Note that overriding this default value may result in
+ unsupported behavior.
:paramtype api_version: str
"""
def __init__(self, vault_base_url: str, credential: "TokenCredential", **kwargs: Any) -> None:
- api_version: str = kwargs.pop("api_version", "7.6")
+ api_version: str = kwargs.pop("api_version", "2025-07-01")
if vault_base_url is None:
raise ValueError("Parameter 'vault_base_url' must not be None.")
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_model_base.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_model_base.py
deleted file mode 100644
index 3072ee252ed9..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_model_base.py
+++ /dev/null
@@ -1,1235 +0,0 @@
-# pylint: disable=too-many-lines
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-# --------------------------------------------------------------------------
-# pylint: disable=protected-access, broad-except
-
-import copy
-import calendar
-import decimal
-import functools
-import sys
-import logging
-import base64
-import re
-import typing
-import enum
-import email.utils
-from datetime import datetime, date, time, timedelta, timezone
-from json import JSONEncoder
-import xml.etree.ElementTree as ET
-from typing_extensions import Self
-import isodate
-from azure.core.exceptions import DeserializationError
-from azure.core import CaseInsensitiveEnumMeta
-from azure.core.pipeline import PipelineResponse
-from azure.core.serialization import _Null
-
-if sys.version_info >= (3, 9):
- from collections.abc import MutableMapping
-else:
- from typing import MutableMapping
-
-_LOGGER = logging.getLogger(__name__)
-
-__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"]
-
-TZ_UTC = timezone.utc
-_T = typing.TypeVar("_T")
-
-
-def _timedelta_as_isostr(td: timedelta) -> str:
- """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 'P4DT12H30M05S'
-
- Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython
-
- :param timedelta td: The timedelta to convert
- :rtype: str
- :return: ISO8601 version of this timedelta
- """
-
- # Split seconds to larger units
- seconds = td.total_seconds()
- minutes, seconds = divmod(seconds, 60)
- hours, minutes = divmod(minutes, 60)
- days, hours = divmod(hours, 24)
-
- days, hours, minutes = list(map(int, (days, hours, minutes)))
- seconds = round(seconds, 6)
-
- # Build date
- date_str = ""
- if days:
- date_str = "%sD" % days
-
- if hours or minutes or seconds:
- # Build time
- time_str = "T"
-
- # Hours
- bigger_exists = date_str or hours
- if bigger_exists:
- time_str += "{:02}H".format(hours)
-
- # Minutes
- bigger_exists = bigger_exists or minutes
- if bigger_exists:
- time_str += "{:02}M".format(minutes)
-
- # Seconds
- try:
- if seconds.is_integer():
- seconds_string = "{:02}".format(int(seconds))
- else:
- # 9 chars long w/ leading 0, 6 digits after decimal
- seconds_string = "%09.6f" % seconds
- # Remove trailing zeros
- seconds_string = seconds_string.rstrip("0")
- except AttributeError: # int.is_integer() raises
- seconds_string = "{:02}".format(seconds)
-
- time_str += "{}S".format(seconds_string)
- else:
- time_str = ""
-
- return "P" + date_str + time_str
-
-
-def _serialize_bytes(o, format: typing.Optional[str] = None) -> str:
- encoded = base64.b64encode(o).decode()
- if format == "base64url":
- return encoded.strip("=").replace("+", "-").replace("/", "_")
- return encoded
-
-
-def _serialize_datetime(o, format: typing.Optional[str] = None):
- if hasattr(o, "year") and hasattr(o, "hour"):
- if format == "rfc7231":
- return email.utils.format_datetime(o, usegmt=True)
- if format == "unix-timestamp":
- return int(calendar.timegm(o.utctimetuple()))
-
- # astimezone() fails for naive times in Python 2.7, so make make sure o is aware (tzinfo is set)
- if not o.tzinfo:
- iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat()
- else:
- iso_formatted = o.astimezone(TZ_UTC).isoformat()
- # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt)
- return iso_formatted.replace("+00:00", "Z")
- # Next try datetime.date or datetime.time
- return o.isoformat()
-
-
-def _is_readonly(p):
- try:
- return p._visibility == ["read"]
- except AttributeError:
- return False
-
-
-class SdkJSONEncoder(JSONEncoder):
- """A JSON encoder that's capable of serializing datetime objects and bytes."""
-
- def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs):
- super().__init__(*args, **kwargs)
- self.exclude_readonly = exclude_readonly
- self.format = format
-
- def default(self, o): # pylint: disable=too-many-return-statements
- if _is_model(o):
- if self.exclude_readonly:
- readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
- return {k: v for k, v in o.items() if k not in readonly_props}
- return dict(o.items())
- try:
- return super(SdkJSONEncoder, self).default(o)
- except TypeError:
- if isinstance(o, _Null):
- return None
- if isinstance(o, decimal.Decimal):
- return float(o)
- if isinstance(o, (bytes, bytearray)):
- return _serialize_bytes(o, self.format)
- try:
- # First try datetime.datetime
- return _serialize_datetime(o, self.format)
- except AttributeError:
- pass
- # Last, try datetime.timedelta
- try:
- return _timedelta_as_isostr(o)
- except AttributeError:
- # This will be raised when it hits value.total_seconds in the method above
- pass
- return super(SdkJSONEncoder, self).default(o)
-
-
-_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
-_VALID_RFC7231 = re.compile(
- r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s"
- r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT"
-)
-
-
-def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
- """Deserialize ISO-8601 formatted string into Datetime object.
-
- :param str attr: response string to be deserialized.
- :rtype: ~datetime.datetime
- :returns: The datetime object from that input
- """
- if isinstance(attr, datetime):
- # i'm already deserialized
- return attr
- attr = attr.upper()
- match = _VALID_DATE.match(attr)
- if not match:
- raise ValueError("Invalid datetime string: " + attr)
-
- check_decimal = attr.split(".")
- if len(check_decimal) > 1:
- decimal_str = ""
- for digit in check_decimal[1]:
- if digit.isdigit():
- decimal_str += digit
- else:
- break
- if len(decimal_str) > 6:
- attr = attr.replace(decimal_str, decimal_str[0:6])
-
- date_obj = isodate.parse_datetime(attr)
- test_utc = date_obj.utctimetuple()
- if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
- raise OverflowError("Hit max or min date")
- return date_obj
-
-
-def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime:
- """Deserialize RFC7231 formatted string into Datetime object.
-
- :param str attr: response string to be deserialized.
- :rtype: ~datetime.datetime
- :returns: The datetime object from that input
- """
- if isinstance(attr, datetime):
- # i'm already deserialized
- return attr
- match = _VALID_RFC7231.match(attr)
- if not match:
- raise ValueError("Invalid datetime string: " + attr)
-
- return email.utils.parsedate_to_datetime(attr)
-
-
-def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime:
- """Deserialize unix timestamp into Datetime object.
-
- :param str attr: response string to be deserialized.
- :rtype: ~datetime.datetime
- :returns: The datetime object from that input
- """
- if isinstance(attr, datetime):
- # i'm already deserialized
- return attr
- return datetime.fromtimestamp(attr, TZ_UTC)
-
-
-def _deserialize_date(attr: typing.Union[str, date]) -> date:
- """Deserialize ISO-8601 formatted string into Date object.
- :param str attr: response string to be deserialized.
- :rtype: date
- :returns: The date object from that input
- """
- # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
- if isinstance(attr, date):
- return attr
- return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore
-
-
-def _deserialize_time(attr: typing.Union[str, time]) -> time:
- """Deserialize ISO-8601 formatted string into time object.
-
- :param str attr: response string to be deserialized.
- :rtype: datetime.time
- :returns: The time object from that input
- """
- if isinstance(attr, time):
- return attr
- return isodate.parse_time(attr)
-
-
-def _deserialize_bytes(attr):
- if isinstance(attr, (bytes, bytearray)):
- return attr
- return bytes(base64.b64decode(attr))
-
-
-def _deserialize_bytes_base64(attr):
- if isinstance(attr, (bytes, bytearray)):
- return attr
- padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore
- attr = attr + padding # type: ignore
- encoded = attr.replace("-", "+").replace("_", "/")
- return bytes(base64.b64decode(encoded))
-
-
-def _deserialize_duration(attr):
- if isinstance(attr, timedelta):
- return attr
- return isodate.parse_duration(attr)
-
-
-def _deserialize_decimal(attr):
- if isinstance(attr, decimal.Decimal):
- return attr
- return decimal.Decimal(str(attr))
-
-
-def _deserialize_int_as_str(attr):
- if isinstance(attr, int):
- return attr
- return int(attr)
-
-
-_DESERIALIZE_MAPPING = {
- datetime: _deserialize_datetime,
- date: _deserialize_date,
- time: _deserialize_time,
- bytes: _deserialize_bytes,
- bytearray: _deserialize_bytes,
- timedelta: _deserialize_duration,
- typing.Any: lambda x: x,
- decimal.Decimal: _deserialize_decimal,
-}
-
-_DESERIALIZE_MAPPING_WITHFORMAT = {
- "rfc3339": _deserialize_datetime,
- "rfc7231": _deserialize_datetime_rfc7231,
- "unix-timestamp": _deserialize_datetime_unix_timestamp,
- "base64": _deserialize_bytes,
- "base64url": _deserialize_bytes_base64,
-}
-
-
-def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None):
- if annotation is int and rf and rf._format == "str":
- return _deserialize_int_as_str
- if rf and rf._format:
- return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format)
- return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore
-
-
-def _get_type_alias_type(module_name: str, alias_name: str):
- types = {
- k: v
- for k, v in sys.modules[module_name].__dict__.items()
- if isinstance(v, typing._GenericAlias) # type: ignore
- }
- if alias_name not in types:
- return alias_name
- return types[alias_name]
-
-
-def _get_model(module_name: str, model_name: str):
- models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)}
- module_end = module_name.rsplit(".", 1)[0]
- models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)})
- if isinstance(model_name, str):
- model_name = model_name.split(".")[-1]
- if model_name not in models:
- return model_name
- return models[model_name]
-
-
-_UNSET = object()
-
-
-class _MyMutableMapping(MutableMapping[str, typing.Any]): # pylint: disable=unsubscriptable-object
- def __init__(self, data: typing.Dict[str, typing.Any]) -> None:
- self._data = data
-
- def __contains__(self, key: typing.Any) -> bool:
- return key in self._data
-
- def __getitem__(self, key: str) -> typing.Any:
- return self._data.__getitem__(key)
-
- def __setitem__(self, key: str, value: typing.Any) -> None:
- self._data.__setitem__(key, value)
-
- def __delitem__(self, key: str) -> None:
- self._data.__delitem__(key)
-
- def __iter__(self) -> typing.Iterator[typing.Any]:
- return self._data.__iter__()
-
- def __len__(self) -> int:
- return self._data.__len__()
-
- def __ne__(self, other: typing.Any) -> bool:
- return not self.__eq__(other)
-
- def keys(self) -> typing.KeysView[str]:
- """
- :returns: a set-like object providing a view on D's keys
- :rtype: ~typing.KeysView
- """
- return self._data.keys()
-
- def values(self) -> typing.ValuesView[typing.Any]:
- """
- :returns: an object providing a view on D's values
- :rtype: ~typing.ValuesView
- """
- return self._data.values()
-
- def items(self) -> typing.ItemsView[str, typing.Any]:
- """
- :returns: set-like object providing a view on D's items
- :rtype: ~typing.ItemsView
- """
- return self._data.items()
-
- def get(self, key: str, default: typing.Any = None) -> typing.Any:
- """
- Get the value for key if key is in the dictionary, else default.
- :param str key: The key to look up.
- :param any default: The value to return if key is not in the dictionary. Defaults to None
- :returns: D[k] if k in D, else d.
- :rtype: any
- """
- try:
- return self[key]
- except KeyError:
- return default
-
- @typing.overload
- def pop(self, key: str) -> typing.Any: ...
-
- @typing.overload
- def pop(self, key: str, default: _T) -> _T: ...
-
- @typing.overload
- def pop(self, key: str, default: typing.Any) -> typing.Any: ...
-
- def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
- """
- Removes specified key and return the corresponding value.
- :param str key: The key to pop.
- :param any default: The value to return if key is not in the dictionary
- :returns: The value corresponding to the key.
- :rtype: any
- :raises KeyError: If key is not found and default is not given.
- """
- if default is _UNSET:
- return self._data.pop(key)
- return self._data.pop(key, default)
-
- def popitem(self) -> typing.Tuple[str, typing.Any]:
- """
- Removes and returns some (key, value) pair
- :returns: The (key, value) pair.
- :rtype: tuple
- :raises KeyError: if D is empty.
- """
- return self._data.popitem()
-
- def clear(self) -> None:
- """
- Remove all items from D.
- """
- self._data.clear()
-
- def update(self, *args: typing.Any, **kwargs: typing.Any) -> None:
- """
- Updates D from mapping/iterable E and F.
- :param any args: Either a mapping object or an iterable of key-value pairs.
- """
- self._data.update(*args, **kwargs)
-
- @typing.overload
- def setdefault(self, key: str, default: None = None) -> None: ...
-
- @typing.overload
- def setdefault(self, key: str, default: typing.Any) -> typing.Any: ...
-
- def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
- """
- Same as calling D.get(k, d), and setting D[k]=d if k not found
- :param str key: The key to look up.
- :param any default: The value to set if key is not in the dictionary
- :returns: D[k] if k in D, else d.
- :rtype: any
- """
- if default is _UNSET:
- return self._data.setdefault(key)
- return self._data.setdefault(key, default)
-
- def __eq__(self, other: typing.Any) -> bool:
- try:
- other_model = self.__class__(other)
- except Exception:
- return False
- return self._data == other_model._data
-
- def __repr__(self) -> str:
- return str(self._data)
-
-
-def _is_model(obj: typing.Any) -> bool:
- return getattr(obj, "_is_model", False)
-
-
-def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements
- if isinstance(o, list):
- return [_serialize(x, format) for x in o]
- if isinstance(o, dict):
- return {k: _serialize(v, format) for k, v in o.items()}
- if isinstance(o, set):
- return {_serialize(x, format) for x in o}
- if isinstance(o, tuple):
- return tuple(_serialize(x, format) for x in o)
- if isinstance(o, (bytes, bytearray)):
- return _serialize_bytes(o, format)
- if isinstance(o, decimal.Decimal):
- return float(o)
- if isinstance(o, enum.Enum):
- return o.value
- if isinstance(o, int):
- if format == "str":
- return str(o)
- return o
- try:
- # First try datetime.datetime
- return _serialize_datetime(o, format)
- except AttributeError:
- pass
- # Last, try datetime.timedelta
- try:
- return _timedelta_as_isostr(o)
- except AttributeError:
- # This will be raised when it hits value.total_seconds in the method above
- pass
- return o
-
-
-def _get_rest_field(
- attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str
-) -> typing.Optional["_RestField"]:
- try:
- return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name)
- except StopIteration:
- return None
-
-
-def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any:
- if not rf:
- return _serialize(value, None)
- if rf._is_multipart_file_input:
- return value
- if rf._is_model:
- return _deserialize(rf._type, value)
- if isinstance(value, ET.Element):
- value = _deserialize(rf._type, value)
- return _serialize(value, rf._format)
-
-
-class Model(_MyMutableMapping):
- _is_model = True
- # label whether current class's _attr_to_rest_field has been calculated
- # could not see _attr_to_rest_field directly because subclass inherits it from parent class
- _calculated: typing.Set[str] = set()
-
- def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None:
- class_name = self.__class__.__name__
- if len(args) > 1:
- raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given")
- dict_to_pass = {
- rest_field._rest_name: rest_field._default
- for rest_field in self._attr_to_rest_field.values()
- if rest_field._default is not _UNSET
- }
- if args: # pylint: disable=too-many-nested-blocks
- if isinstance(args[0], ET.Element):
- existed_attr_keys = []
- model_meta = getattr(self, "_xml", {})
-
- for rf in self._attr_to_rest_field.values():
- prop_meta = getattr(rf, "_xml", {})
- xml_name = prop_meta.get("name", rf._rest_name)
- xml_ns = prop_meta.get("ns", model_meta.get("ns", None))
- if xml_ns:
- xml_name = "{" + xml_ns + "}" + xml_name
-
- # attribute
- if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None:
- existed_attr_keys.append(xml_name)
- dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name))
- continue
-
- # unwrapped element is array
- if prop_meta.get("unwrapped", False):
- # unwrapped array could either use prop items meta/prop meta
- if prop_meta.get("itemsName"):
- xml_name = prop_meta.get("itemsName")
- xml_ns = prop_meta.get("itemNs")
- if xml_ns:
- xml_name = "{" + xml_ns + "}" + xml_name
- items = args[0].findall(xml_name) # pyright: ignore
- if len(items) > 0:
- existed_attr_keys.append(xml_name)
- dict_to_pass[rf._rest_name] = _deserialize(rf._type, items)
- continue
-
- # text element is primitive type
- if prop_meta.get("text", False):
- if args[0].text is not None:
- dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text)
- continue
-
- # wrapped element could be normal property or array, it should only have one element
- item = args[0].find(xml_name)
- if item is not None:
- existed_attr_keys.append(xml_name)
- dict_to_pass[rf._rest_name] = _deserialize(rf._type, item)
-
- # rest thing is additional properties
- for e in args[0]:
- if e.tag not in existed_attr_keys:
- dict_to_pass[e.tag] = _convert_element(e)
- else:
- dict_to_pass.update(
- {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()}
- )
- else:
- non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field]
- if non_attr_kwargs:
- # actual type errors only throw the first wrong keyword arg they see, so following that.
- raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'")
- dict_to_pass.update(
- {
- self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v)
- for k, v in kwargs.items()
- if v is not None
- }
- )
- super().__init__(dict_to_pass)
-
- def copy(self) -> "Model":
- return Model(self.__dict__)
-
- def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self:
- if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated:
- # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping',
- # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object'
- mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order
- attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property
- k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type")
- }
- annotations = {
- k: v
- for mro_class in mros
- if hasattr(mro_class, "__annotations__")
- for k, v in mro_class.__annotations__.items()
- }
- for attr, rf in attr_to_rest_field.items():
- rf._module = cls.__module__
- if not rf._type:
- rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None))
- if not rf._rest_name_input:
- rf._rest_name_input = attr
- cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items())
- cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}")
-
- return super().__new__(cls) # pylint: disable=no-value-for-parameter
-
- def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None:
- for base in cls.__bases__:
- if hasattr(base, "__mapping__"):
- base.__mapping__[discriminator or cls.__name__] = cls # type: ignore
-
- @classmethod
- def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]:
- for v in cls.__dict__.values():
- if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators:
- return v
- return None
-
- @classmethod
- def _deserialize(cls, data, exist_discriminators):
- if not hasattr(cls, "__mapping__"):
- return cls(data)
- discriminator = cls._get_discriminator(exist_discriminators)
- if discriminator is None:
- return cls(data)
- exist_discriminators.append(discriminator._rest_name)
- if isinstance(data, ET.Element):
- model_meta = getattr(cls, "_xml", {})
- prop_meta = getattr(discriminator, "_xml", {})
- xml_name = prop_meta.get("name", discriminator._rest_name)
- xml_ns = prop_meta.get("ns", model_meta.get("ns", None))
- if xml_ns:
- xml_name = "{" + xml_ns + "}" + xml_name
-
- if data.get(xml_name) is not None:
- discriminator_value = data.get(xml_name)
- else:
- discriminator_value = data.find(xml_name).text # pyright: ignore
- else:
- discriminator_value = data.get(discriminator._rest_name)
- mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore
- return mapped_cls._deserialize(data, exist_discriminators)
-
- def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]:
- """Return a dict that can be turned into json using json.dump.
-
- :keyword bool exclude_readonly: Whether to remove the readonly properties.
- :returns: A dict JSON compatible object
- :rtype: dict
- """
-
- result = {}
- readonly_props = []
- if exclude_readonly:
- readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)]
- for k, v in self.items():
- if exclude_readonly and k in readonly_props: # pyright: ignore
- continue
- is_multipart_file_input = False
- try:
- is_multipart_file_input = next(
- rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k
- )._is_multipart_file_input
- except StopIteration:
- pass
- result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly)
- return result
-
- @staticmethod
- def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any:
- if v is None or isinstance(v, _Null):
- return None
- if isinstance(v, (list, tuple, set)):
- return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v)
- if isinstance(v, dict):
- return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()}
- return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v
-
-
-def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj):
- if _is_model(obj):
- return obj
- return _deserialize(model_deserializer, obj)
-
-
-def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj):
- if obj is None:
- return obj
- return _deserialize_with_callable(if_obj_deserializer, obj)
-
-
-def _deserialize_with_union(deserializers, obj):
- for deserializer in deserializers:
- try:
- return _deserialize(deserializer, obj)
- except DeserializationError:
- pass
- raise DeserializationError()
-
-
-def _deserialize_dict(
- value_deserializer: typing.Optional[typing.Callable],
- module: typing.Optional[str],
- obj: typing.Dict[typing.Any, typing.Any],
-):
- if obj is None:
- return obj
- if isinstance(obj, ET.Element):
- obj = {child.tag: child for child in obj}
- return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()}
-
-
-def _deserialize_multiple_sequence(
- entry_deserializers: typing.List[typing.Optional[typing.Callable]],
- module: typing.Optional[str],
- obj,
-):
- if obj is None:
- return obj
- return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers))
-
-
-def _deserialize_sequence(
- deserializer: typing.Optional[typing.Callable],
- module: typing.Optional[str],
- obj,
-):
- if obj is None:
- return obj
- if isinstance(obj, ET.Element):
- obj = list(obj)
- return type(obj)(_deserialize(deserializer, entry, module) for entry in obj)
-
-
-def _sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.Any]:
- return sorted(
- types,
- key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"),
- )
-
-
-def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches
- annotation: typing.Any,
- module: typing.Optional[str],
- rf: typing.Optional["_RestField"] = None,
-) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
- if not annotation:
- return None
-
- # is it a type alias?
- if isinstance(annotation, str):
- if module is not None:
- annotation = _get_type_alias_type(module, annotation)
-
- # is it a forward ref / in quotes?
- if isinstance(annotation, (str, typing.ForwardRef)):
- try:
- model_name = annotation.__forward_arg__ # type: ignore
- except AttributeError:
- model_name = annotation
- if module is not None:
- annotation = _get_model(module, model_name) # type: ignore
-
- try:
- if module and _is_model(annotation):
- if rf:
- rf._is_model = True
-
- return functools.partial(_deserialize_model, annotation) # pyright: ignore
- except Exception:
- pass
-
- # is it a literal?
- try:
- if annotation.__origin__ is typing.Literal: # pyright: ignore
- return None
- except AttributeError:
- pass
-
- # is it optional?
- try:
- if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore
- if len(annotation.__args__) <= 2: # pyright: ignore
- if_obj_deserializer = _get_deserialize_callable_from_annotation(
- next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore
- )
-
- return functools.partial(_deserialize_with_optional, if_obj_deserializer)
- # the type is Optional[Union[...]], we need to remove the None type from the Union
- annotation_copy = copy.copy(annotation)
- annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore
- return _get_deserialize_callable_from_annotation(annotation_copy, module, rf)
- except AttributeError:
- pass
-
- # is it union?
- if getattr(annotation, "__origin__", None) is typing.Union:
- # initial ordering is we make `string` the last deserialization option, because it is often them most generic
- deserializers = [
- _get_deserialize_callable_from_annotation(arg, module, rf)
- for arg in _sorted_annotations(annotation.__args__) # pyright: ignore
- ]
-
- return functools.partial(_deserialize_with_union, deserializers)
-
- try:
- if annotation._name == "Dict": # pyright: ignore
- value_deserializer = _get_deserialize_callable_from_annotation(
- annotation.__args__[1], module, rf # pyright: ignore
- )
-
- return functools.partial(
- _deserialize_dict,
- value_deserializer,
- module,
- )
- except (AttributeError, IndexError):
- pass
- try:
- if annotation._name in ["List", "Set", "Tuple", "Sequence"]: # pyright: ignore
- if len(annotation.__args__) > 1: # pyright: ignore
- entry_deserializers = [
- _get_deserialize_callable_from_annotation(dt, module, rf)
- for dt in annotation.__args__ # pyright: ignore
- ]
- return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module)
- deserializer = _get_deserialize_callable_from_annotation(
- annotation.__args__[0], module, rf # pyright: ignore
- )
-
- return functools.partial(_deserialize_sequence, deserializer, module)
- except (TypeError, IndexError, AttributeError, SyntaxError):
- pass
-
- def _deserialize_default(
- deserializer,
- obj,
- ):
- if obj is None:
- return obj
- try:
- return _deserialize_with_callable(deserializer, obj)
- except Exception:
- pass
- return obj
-
- if get_deserializer(annotation, rf):
- return functools.partial(_deserialize_default, get_deserializer(annotation, rf))
-
- return functools.partial(_deserialize_default, annotation)
-
-
-def _deserialize_with_callable(
- deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]],
- value: typing.Any,
-): # pylint: disable=too-many-return-statements
- try:
- if value is None or isinstance(value, _Null):
- return None
- if isinstance(value, ET.Element):
- if deserializer is str:
- return value.text or ""
- if deserializer is int:
- return int(value.text) if value.text else None
- if deserializer is float:
- return float(value.text) if value.text else None
- if deserializer is bool:
- return value.text == "true" if value.text else None
- if deserializer is None:
- return value
- if deserializer in [int, float, bool]:
- return deserializer(value)
- if isinstance(deserializer, CaseInsensitiveEnumMeta):
- try:
- return deserializer(value)
- except ValueError:
- # for unknown value, return raw value
- return value
- if isinstance(deserializer, type) and issubclass(deserializer, Model):
- return deserializer._deserialize(value, [])
- return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value)
- except Exception as e:
- raise DeserializationError() from e
-
-
-def _deserialize(
- deserializer: typing.Any,
- value: typing.Any,
- module: typing.Optional[str] = None,
- rf: typing.Optional["_RestField"] = None,
- format: typing.Optional[str] = None,
-) -> typing.Any:
- if isinstance(value, PipelineResponse):
- value = value.http_response.json()
- if rf is None and format:
- rf = _RestField(format=format)
- if not isinstance(deserializer, functools.partial):
- deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf)
- return _deserialize_with_callable(deserializer, value)
-
-
-def _failsafe_deserialize(
- deserializer: typing.Any,
- value: typing.Any,
- module: typing.Optional[str] = None,
- rf: typing.Optional["_RestField"] = None,
- format: typing.Optional[str] = None,
-) -> typing.Any:
- try:
- return _deserialize(deserializer, value, module, rf, format)
- except DeserializationError:
- _LOGGER.warning(
- "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
- )
- return None
-
-
-def _failsafe_deserialize_xml(
- deserializer: typing.Any,
- value: typing.Any,
-) -> typing.Any:
- try:
- return _deserialize_xml(deserializer, value)
- except DeserializationError:
- _LOGGER.warning(
- "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
- )
- return None
-
-
-class _RestField:
- def __init__(
- self,
- *,
- name: typing.Optional[str] = None,
- type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin
- is_discriminator: bool = False,
- visibility: typing.Optional[typing.List[str]] = None,
- default: typing.Any = _UNSET,
- format: typing.Optional[str] = None,
- is_multipart_file_input: bool = False,
- xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
- ):
- self._type = type
- self._rest_name_input = name
- self._module: typing.Optional[str] = None
- self._is_discriminator = is_discriminator
- self._visibility = visibility
- self._is_model = False
- self._default = default
- self._format = format
- self._is_multipart_file_input = is_multipart_file_input
- self._xml = xml if xml is not None else {}
-
- @property
- def _class_type(self) -> typing.Any:
- return getattr(self._type, "args", [None])[0]
-
- @property
- def _rest_name(self) -> str:
- if self._rest_name_input is None:
- raise ValueError("Rest name was never set")
- return self._rest_name_input
-
- def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin
- # by this point, type and rest_name will have a value bc we default
- # them in __new__ of the Model class
- item = obj.get(self._rest_name)
- if item is None:
- return item
- if self._is_model:
- return item
- return _deserialize(self._type, _serialize(item, self._format), rf=self)
-
- def __set__(self, obj: Model, value) -> None:
- if value is None:
- # we want to wipe out entries if users set attr to None
- try:
- obj.__delitem__(self._rest_name)
- except KeyError:
- pass
- return
- if self._is_model:
- if not _is_model(value):
- value = _deserialize(self._type, value)
- obj.__setitem__(self._rest_name, value)
- return
- obj.__setitem__(self._rest_name, _serialize(value, self._format))
-
- def _get_deserialize_callable_from_annotation(
- self, annotation: typing.Any
- ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
- return _get_deserialize_callable_from_annotation(annotation, self._module, self)
-
-
-def rest_field(
- *,
- name: typing.Optional[str] = None,
- type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin
- visibility: typing.Optional[typing.List[str]] = None,
- default: typing.Any = _UNSET,
- format: typing.Optional[str] = None,
- is_multipart_file_input: bool = False,
- xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
-) -> typing.Any:
- return _RestField(
- name=name,
- type=type,
- visibility=visibility,
- default=default,
- format=format,
- is_multipart_file_input=is_multipart_file_input,
- xml=xml,
- )
-
-
-def rest_discriminator(
- *,
- name: typing.Optional[str] = None,
- type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin
- visibility: typing.Optional[typing.List[str]] = None,
- xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
-) -> typing.Any:
- return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml)
-
-
-def serialize_xml(model: Model, exclude_readonly: bool = False) -> str:
- """Serialize a model to XML.
-
- :param Model model: The model to serialize.
- :param bool exclude_readonly: Whether to exclude readonly properties.
- :returns: The XML representation of the model.
- :rtype: str
- """
- return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore
-
-
-def _get_element(
- o: typing.Any,
- exclude_readonly: bool = False,
- parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None,
- wrapped_element: typing.Optional[ET.Element] = None,
-) -> typing.Union[ET.Element, typing.List[ET.Element]]:
- if _is_model(o):
- model_meta = getattr(o, "_xml", {})
-
- # if prop is a model, then use the prop element directly, else generate a wrapper of model
- if wrapped_element is None:
- wrapped_element = _create_xml_element(
- model_meta.get("name", o.__class__.__name__),
- model_meta.get("prefix"),
- model_meta.get("ns"),
- )
-
- readonly_props = []
- if exclude_readonly:
- readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
-
- for k, v in o.items():
- # do not serialize readonly properties
- if exclude_readonly and k in readonly_props:
- continue
-
- prop_rest_field = _get_rest_field(o._attr_to_rest_field, k)
- if prop_rest_field:
- prop_meta = getattr(prop_rest_field, "_xml").copy()
- # use the wire name as xml name if no specific name is set
- if prop_meta.get("name") is None:
- prop_meta["name"] = k
- else:
- # additional properties will not have rest field, use the wire name as xml name
- prop_meta = {"name": k}
-
- # if no ns for prop, use model's
- if prop_meta.get("ns") is None and model_meta.get("ns"):
- prop_meta["ns"] = model_meta.get("ns")
- prop_meta["prefix"] = model_meta.get("prefix")
-
- if prop_meta.get("unwrapped", False):
- # unwrapped could only set on array
- wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta))
- elif prop_meta.get("text", False):
- # text could only set on primitive type
- wrapped_element.text = _get_primitive_type_value(v)
- elif prop_meta.get("attribute", False):
- xml_name = prop_meta.get("name", k)
- if prop_meta.get("ns"):
- ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore
- xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore
- # attribute should be primitive type
- wrapped_element.set(xml_name, _get_primitive_type_value(v))
- else:
- # other wrapped prop element
- wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta))
- return wrapped_element
- if isinstance(o, list):
- return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore
- if isinstance(o, dict):
- result = []
- for k, v in o.items():
- result.append(
- _get_wrapped_element(
- v,
- exclude_readonly,
- {
- "name": k,
- "ns": parent_meta.get("ns") if parent_meta else None,
- "prefix": parent_meta.get("prefix") if parent_meta else None,
- },
- )
- )
- return result
-
- # primitive case need to create element based on parent_meta
- if parent_meta:
- return _get_wrapped_element(
- o,
- exclude_readonly,
- {
- "name": parent_meta.get("itemsName", parent_meta.get("name")),
- "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")),
- "ns": parent_meta.get("itemsNs", parent_meta.get("ns")),
- },
- )
-
- raise ValueError("Could not serialize value into xml: " + o)
-
-
-def _get_wrapped_element(
- v: typing.Any,
- exclude_readonly: bool,
- meta: typing.Optional[typing.Dict[str, typing.Any]],
-) -> ET.Element:
- wrapped_element = _create_xml_element(
- meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None
- )
- if isinstance(v, (dict, list)):
- wrapped_element.extend(_get_element(v, exclude_readonly, meta))
- elif _is_model(v):
- _get_element(v, exclude_readonly, meta, wrapped_element)
- else:
- wrapped_element.text = _get_primitive_type_value(v)
- return wrapped_element
-
-
-def _get_primitive_type_value(v) -> str:
- if v is True:
- return "true"
- if v is False:
- return "false"
- if isinstance(v, _Null):
- return ""
- return str(v)
-
-
-def _create_xml_element(tag, prefix=None, ns=None):
- if prefix and ns:
- ET.register_namespace(prefix, ns)
- if ns:
- return ET.Element("{" + ns + "}" + tag)
- return ET.Element(tag)
-
-
-def _deserialize_xml(
- deserializer: typing.Any,
- value: str,
-) -> typing.Any:
- element = ET.fromstring(value) # nosec
- return _deserialize(deserializer, element)
-
-
-def _convert_element(e: ET.Element):
- # dict case
- if len(e.attrib) > 0 or len({child.tag for child in e}) > 1:
- dict_result: typing.Dict[str, typing.Any] = {}
- for child in e:
- if dict_result.get(child.tag) is not None:
- if isinstance(dict_result[child.tag], list):
- dict_result[child.tag].append(_convert_element(child))
- else:
- dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)]
- else:
- dict_result[child.tag] = _convert_element(child)
- dict_result.update(e.attrib)
- return dict_result
- # array case
- if len(e) > 0:
- array_result: typing.List[typing.Any] = []
- for child in e:
- array_result.append(_convert_element(child))
- return array_result
- # primitive case
- return e.text
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/__init__.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/__init__.py
index d514f5e4b5be..79e1a2ccf3da 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/__init__.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/__init__.py
@@ -12,14 +12,12 @@
if TYPE_CHECKING:
from ._patch import * # pylint: disable=unused-wildcard-import
-from ._operations import KeyVaultClientOperationsMixin # type: ignore
+from ._operations import _KeyVaultClientOperationsMixin # type: ignore # pylint: disable=unused-import
from ._patch import __all__ as _patch_all
from ._patch import *
from ._patch import patch_sdk as _patch_sdk
-__all__ = [
- "KeyVaultClientOperationsMixin",
-]
+__all__ = []
__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/_operations.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/_operations.py
index c83ef9a31524..b87db9f85cb9 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/_operations.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/_operations.py
@@ -9,7 +9,7 @@
from collections.abc import MutableMapping
from io import IOBase
import json
-from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, overload
+from typing import Any, Callable, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core import PipelineClient
@@ -34,10 +34,11 @@
from .._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize
from .._utils.serialization import Serializer
from .._utils.utils import ClientMixinABC
+from .._validation import api_version_validation
JSON = MutableMapping[str, Any]
T = TypeVar("T")
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
@@ -48,7 +49,7 @@ def build_key_vault_set_secret_request(secret_name: str, **kwargs: Any) -> HttpR
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -74,7 +75,7 @@ def build_key_vault_delete_secret_request(secret_name: str, **kwargs: Any) -> Ht
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -99,7 +100,7 @@ def build_key_vault_update_secret_request(secret_name: str, secret_version: str,
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -122,11 +123,17 @@ def build_key_vault_update_secret_request(secret_name: str, secret_version: str,
return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
-def build_key_vault_get_secret_request(secret_name: str, secret_version: str, **kwargs: Any) -> HttpRequest:
+def build_key_vault_get_secret_request(
+ secret_name: str,
+ secret_version: str,
+ *,
+ out_content_type: Optional[Union[str, _models.ContentType]] = None,
+ **kwargs: Any
+) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -139,6 +146,8 @@ def build_key_vault_get_secret_request(secret_name: str, secret_version: str, **
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
+ if out_content_type is not None:
+ _params["outContentType"] = _SERIALIZER.query("out_content_type", out_content_type, "str")
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
@@ -151,7 +160,7 @@ def build_key_vault_get_secrets_request(*, maxresults: Optional[int] = None, **k
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -174,7 +183,7 @@ def build_key_vault_get_secret_versions_request( # pylint: disable=name-too-lon
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -202,7 +211,7 @@ def build_key_vault_get_deleted_secrets_request( # pylint: disable=name-too-lon
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -225,7 +234,7 @@ def build_key_vault_get_deleted_secret_request( # pylint: disable=name-too-long
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -248,12 +257,9 @@ def build_key_vault_get_deleted_secret_request( # pylint: disable=name-too-long
def build_key_vault_purge_deleted_secret_request( # pylint: disable=name-too-long
secret_name: str, **kwargs: Any
) -> HttpRequest:
- _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6"))
- accept = _headers.pop("Accept", "application/json")
-
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01"))
# Construct URL
_url = "/deletedsecrets/{secret-name}"
path_format_arguments = {
@@ -265,10 +271,7 @@ def build_key_vault_purge_deleted_secret_request( # pylint: disable=name-too-lo
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
- # Construct headers
- _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
-
- return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
+ return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)
def build_key_vault_recover_deleted_secret_request( # pylint: disable=name-too-long
@@ -277,7 +280,7 @@ def build_key_vault_recover_deleted_secret_request( # pylint: disable=name-too-
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -301,7 +304,7 @@ def build_key_vault_backup_secret_request(secret_name: str, **kwargs: Any) -> Ht
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -326,7 +329,7 @@ def build_key_vault_restore_secret_request(**kwargs: Any) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -343,7 +346,7 @@ def build_key_vault_restore_secret_request(**kwargs: Any) -> HttpRequest:
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
-class KeyVaultClientOperationsMixin(
+class _KeyVaultClientOperationsMixin(
ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], KeyVaultClientConfiguration]
):
@@ -482,6 +485,7 @@ def set_secret(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -496,11 +500,14 @@ def set_secret(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.SecretBundle, response.json())
@@ -548,6 +555,7 @@ def delete_secret(self, secret_name: str, **kwargs: Any) -> _models.DeletedSecre
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -562,11 +570,14 @@ def delete_secret(self, secret_name: str, **kwargs: Any) -> _models.DeletedSecre
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.DeletedSecretBundle, response.json())
@@ -728,6 +739,7 @@ def update_secret(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -742,11 +754,14 @@ def update_secret(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.SecretBundle, response.json())
@@ -756,7 +771,18 @@ def update_secret(
return deserialized # type: ignore
@distributed_trace
- def get_secret(self, secret_name: str, secret_version: str, **kwargs: Any) -> _models.SecretBundle:
+ @api_version_validation(
+ params_added_on={"2025-06-01-preview": ["out_content_type"]},
+ api_versions_list=["7.5", "7.6-preview.2", "7.6", "2025-06-01-preview", "2025-07-01"],
+ )
+ def get_secret(
+ self,
+ secret_name: str,
+ secret_version: str,
+ *,
+ out_content_type: Optional[Union[str, _models.ContentType]] = None,
+ **kwargs: Any
+ ) -> _models.SecretBundle:
"""Get a specified secret from a given key vault.
The GET operation is applicable to any secret stored in Azure Key Vault. This operation
@@ -767,6 +793,12 @@ def get_secret(self, secret_name: str, secret_version: str, **kwargs: Any) -> _m
:param secret_version: The version of the secret. This URI fragment is optional. If not
specified, the latest version of the secret is returned. Required.
:type secret_version: str
+ :keyword out_content_type: The media type (MIME type) of the certificate. If a supported format
+ is specified, the certificate content is converted to the requested format. Currently, only PFX
+ to PEM conversion is supported. If an unsupported format is specified, the request is rejected.
+ If not specified, the certificate is returned in its original format without conversion. Known
+ values are: "application/x-pkcs12" and "application/x-pem-file". Default value is None.
+ :paramtype out_content_type: str or ~azure.keyvault.secrets._generated.models.ContentType
:return: SecretBundle. The SecretBundle is compatible with MutableMapping
:rtype: ~azure.keyvault.secrets._generated.models.SecretBundle
:raises ~azure.core.exceptions.HttpResponseError:
@@ -787,6 +819,7 @@ def get_secret(self, secret_name: str, secret_version: str, **kwargs: Any) -> _m
_request = build_key_vault_get_secret_request(
secret_name=secret_name,
secret_version=secret_version,
+ out_content_type=out_content_type,
api_version=self._config.api_version,
headers=_headers,
params=_params,
@@ -798,6 +831,7 @@ def get_secret(self, secret_name: str, secret_version: str, **kwargs: Any) -> _m
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -812,11 +846,14 @@ def get_secret(self, secret_name: str, secret_version: str, **kwargs: Any) -> _m
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.SecretBundle, response.json())
@@ -826,7 +863,7 @@ def get_secret(self, secret_name: str, secret_version: str, **kwargs: Any) -> _m
return deserialized # type: ignore
@distributed_trace
- def get_secrets(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> Iterable["_models.SecretItem"]:
+ def get_secrets(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> ItemPaged["_models.SecretItem"]:
"""List secrets in a specified key vault.
The Get Secrets operation is applicable to the entire vault. However, only the base secret
@@ -843,7 +880,7 @@ def get_secrets(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> Ite
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
- cls: ClsType[List[_models.SecretItem]] = kwargs.pop("cls", None)
+ cls: ClsType[list[_models.SecretItem]] = kwargs.pop("cls", None)
error_map: MutableMapping = {
401: ClientAuthenticationError,
@@ -893,7 +930,10 @@ def prepare_request(next_link=None):
def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.SecretItem], deserialized.get("value", []))
+ list_of_elem = _deserialize(
+ list[_models.SecretItem],
+ deserialized.get("value", []),
+ )
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.get("nextLink") or None, iter(list_of_elem)
@@ -909,7 +949,10 @@ def get_next(next_link=None):
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
return pipeline_response
@@ -919,7 +962,7 @@ def get_next(next_link=None):
@distributed_trace
def get_secret_versions(
self, secret_name: str, *, maxresults: Optional[int] = None, **kwargs: Any
- ) -> Iterable["_models.SecretItem"]:
+ ) -> ItemPaged["_models.SecretItem"]:
"""List all versions of the specified secret.
The full secret identifier and attributes are provided in the response. No values are returned
@@ -937,7 +980,7 @@ def get_secret_versions(
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
- cls: ClsType[List[_models.SecretItem]] = kwargs.pop("cls", None)
+ cls: ClsType[list[_models.SecretItem]] = kwargs.pop("cls", None)
error_map: MutableMapping = {
401: ClientAuthenticationError,
@@ -988,7 +1031,10 @@ def prepare_request(next_link=None):
def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.SecretItem], deserialized.get("value", []))
+ list_of_elem = _deserialize(
+ list[_models.SecretItem],
+ deserialized.get("value", []),
+ )
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.get("nextLink") or None, iter(list_of_elem)
@@ -1004,7 +1050,10 @@ def get_next(next_link=None):
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
return pipeline_response
@@ -1014,7 +1063,7 @@ def get_next(next_link=None):
@distributed_trace
def get_deleted_secrets(
self, *, maxresults: Optional[int] = None, **kwargs: Any
- ) -> Iterable["_models.DeletedSecretItem"]:
+ ) -> ItemPaged["_models.DeletedSecretItem"]:
"""Lists deleted secrets for the specified vault.
The Get Deleted Secrets operation returns the secrets that have been deleted for a vault
@@ -1031,7 +1080,7 @@ def get_deleted_secrets(
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
- cls: ClsType[List[_models.DeletedSecretItem]] = kwargs.pop("cls", None)
+ cls: ClsType[list[_models.DeletedSecretItem]] = kwargs.pop("cls", None)
error_map: MutableMapping = {
401: ClientAuthenticationError,
@@ -1081,7 +1130,10 @@ def prepare_request(next_link=None):
def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.DeletedSecretItem], deserialized.get("value", []))
+ list_of_elem = _deserialize(
+ list[_models.DeletedSecretItem],
+ deserialized.get("value", []),
+ )
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.get("nextLink") or None, iter(list_of_elem)
@@ -1097,7 +1149,10 @@ def get_next(next_link=None):
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
return pipeline_response
@@ -1143,6 +1198,7 @@ def get_deleted_secret(self, secret_name: str, **kwargs: Any) -> _models.Deleted
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1157,11 +1213,14 @@ def get_deleted_secret(self, secret_name: str, **kwargs: Any) -> _models.Deleted
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.DeletedSecretBundle, response.json())
@@ -1221,7 +1280,10 @@ def purge_deleted_secret( # pylint: disable=inconsistent-return-statements
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if cls:
@@ -1266,6 +1328,7 @@ def recover_deleted_secret(self, secret_name: str, **kwargs: Any) -> _models.Sec
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1280,11 +1343,14 @@ def recover_deleted_secret(self, secret_name: str, **kwargs: Any) -> _models.Sec
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.SecretBundle, response.json())
@@ -1332,6 +1398,7 @@ def backup_secret(self, secret_name: str, **kwargs: Any) -> _models.BackupSecret
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1346,11 +1413,14 @@ def backup_secret(self, secret_name: str, **kwargs: Any) -> _models.BackupSecret
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.BackupSecretResult, response.json())
@@ -1468,6 +1538,7 @@ def restore_secret(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1482,11 +1553,14 @@ def restore_secret(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.SecretBundle, response.json())
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/_patch.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/_patch.py
index f7dd32510333..87676c65a8f0 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/_patch.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/_patch.py
@@ -1,14 +1,15 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
"""Customize generated code here.
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
-from typing import List
-__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+__all__: list[str] = [] # Add all objects you want publicly available to users at this package level
def patch_sdk():
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_patch.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_patch.py
index f7dd32510333..87676c65a8f0 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_patch.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_patch.py
@@ -1,14 +1,15 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
"""Customize generated code here.
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
-from typing import List
-__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+__all__: list[str] = [] # Add all objects you want publicly available to users at this package level
def patch_sdk():
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_serialization.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_serialization.py
deleted file mode 100644
index ef86f1415163..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_serialization.py
+++ /dev/null
@@ -1,2050 +0,0 @@
-# pylint: disable=too-many-lines,line-too-long,useless-suppression
-# --------------------------------------------------------------------------
-#
-# Copyright (c) Microsoft Corporation. All rights reserved.
-#
-# The MIT License (MIT)
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the ""Software""), to
-# deal in the Software without restriction, including without limitation the
-# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-# sell copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-# IN THE SOFTWARE.
-#
-# --------------------------------------------------------------------------
-
-# pyright: reportUnnecessaryTypeIgnoreComment=false
-
-from base64 import b64decode, b64encode
-import calendar
-import datetime
-import decimal
-import email
-from enum import Enum
-import json
-import logging
-import re
-import sys
-import codecs
-from typing import (
- Dict,
- Any,
- cast,
- Optional,
- Union,
- AnyStr,
- IO,
- Mapping,
- Callable,
- MutableMapping,
- List,
-)
-
-try:
- from urllib import quote # type: ignore
-except ImportError:
- from urllib.parse import quote
-import xml.etree.ElementTree as ET
-
-import isodate # type: ignore
-from typing_extensions import Self
-
-from azure.core.exceptions import DeserializationError, SerializationError
-from azure.core.serialization import NULL as CoreNull
-
-_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
-
-JSON = MutableMapping[str, Any]
-
-
-class RawDeserializer:
-
- # Accept "text" because we're open minded people...
- JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")
-
- # Name used in context
- CONTEXT_NAME = "deserialized_data"
-
- @classmethod
- def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any:
- """Decode data according to content-type.
-
- Accept a stream of data as well, but will be load at once in memory for now.
-
- If no content-type, will return the string version (not bytes, not stream)
-
- :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
- :type data: str or bytes or IO
- :param str content_type: The content type.
- :return: The deserialized data.
- :rtype: object
- """
- if hasattr(data, "read"):
- # Assume a stream
- data = cast(IO, data).read()
-
- if isinstance(data, bytes):
- data_as_str = data.decode(encoding="utf-8-sig")
- else:
- # Explain to mypy the correct type.
- data_as_str = cast(str, data)
-
- # Remove Byte Order Mark if present in string
- data_as_str = data_as_str.lstrip(_BOM)
-
- if content_type is None:
- return data
-
- if cls.JSON_REGEXP.match(content_type):
- try:
- return json.loads(data_as_str)
- except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err) from err
- elif "xml" in (content_type or []):
- try:
-
- try:
- if isinstance(data, unicode): # type: ignore
- # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string
- data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore
- except NameError:
- pass
-
- return ET.fromstring(data_as_str) # nosec
- except ET.ParseError as err:
- # It might be because the server has an issue, and returned JSON with
- # content-type XML....
- # So let's try a JSON load, and if it's still broken
- # let's flow the initial exception
- def _json_attemp(data):
- try:
- return True, json.loads(data)
- except ValueError:
- return False, None # Don't care about this one
-
- success, json_result = _json_attemp(data)
- if success:
- return json_result
- # If i'm here, it's not JSON, it's not XML, let's scream
- # and raise the last context in this block (the XML exception)
- # The function hack is because Py2.7 messes up with exception
- # context otherwise.
- _LOGGER.critical("Wasn't XML not JSON, failing")
- raise DeserializationError("XML is invalid") from err
- elif content_type.startswith("text/"):
- return data_as_str
- raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
-
- @classmethod
- def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
- """Deserialize from HTTP response.
-
- Use bytes and headers to NOT use any requests/aiohttp or whatever
- specific implementation.
- Headers will tested for "content-type"
-
- :param bytes body_bytes: The body of the response.
- :param dict headers: The headers of the response.
- :returns: The deserialized data.
- :rtype: object
- """
- # Try to use content-type from headers if available
- content_type = None
- if "content-type" in headers:
- content_type = headers["content-type"].split(";")[0].strip().lower()
- # Ouch, this server did not declare what it sent...
- # Let's guess it's JSON...
- # Also, since Autorest was considering that an empty body was a valid JSON,
- # need that test as well....
- else:
- content_type = "application/json"
-
- if body_bytes:
- return cls.deserialize_from_text(body_bytes, content_type)
- return None
-
-
-_LOGGER = logging.getLogger(__name__)
-
-try:
- _long_type = long # type: ignore
-except NameError:
- _long_type = int
-
-TZ_UTC = datetime.timezone.utc
-
-_FLATTEN = re.compile(r"(? None:
- self.additional_properties: Optional[Dict[str, Any]] = {}
- for k in kwargs: # pylint: disable=consider-using-dict-items
- if k not in self._attribute_map:
- _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
- elif k in self._validation and self._validation[k].get("readonly", False):
- _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__)
- else:
- setattr(self, k, kwargs[k])
-
- def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes.
-
- :param object other: The object to compare
- :returns: True if objects are equal
- :rtype: bool
- """
- if isinstance(other, self.__class__):
- return self.__dict__ == other.__dict__
- return False
-
- def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes.
-
- :param object other: The object to compare
- :returns: True if objects are not equal
- :rtype: bool
- """
- return not self.__eq__(other)
-
- def __str__(self) -> str:
- return str(self.__dict__)
-
- @classmethod
- def enable_additional_properties_sending(cls) -> None:
- cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}
-
- @classmethod
- def is_xml_model(cls) -> bool:
- try:
- cls._xml_map # type: ignore
- except AttributeError:
- return False
- return True
-
- @classmethod
- def _create_xml_node(cls):
- """Create XML node.
-
- :returns: The XML node
- :rtype: xml.etree.ElementTree.Element
- """
- try:
- xml_map = cls._xml_map # type: ignore
- except AttributeError:
- xml_map = {}
-
- return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
-
- def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
- """Return the JSON that would be sent to server from this model.
-
- This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
-
- If you want XML serialization, you can pass the kwargs is_xml=True.
-
- :param bool keep_readonly: If you want to serialize the readonly attributes
- :returns: A dict JSON compatible object
- :rtype: dict
- """
- serializer = Serializer(self._infer_class_models())
- return serializer._serialize( # type: ignore # pylint: disable=protected-access
- self, keep_readonly=keep_readonly, **kwargs
- )
-
- def as_dict(
- self,
- keep_readonly: bool = True,
- key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer,
- **kwargs: Any
- ) -> JSON:
- """Return a dict that can be serialized using json.dump.
-
- Advanced usage might optionally use a callback as parameter:
-
- .. code::python
-
- def my_key_transformer(key, attr_desc, value):
- return key
-
- Key is the attribute name used in Python. Attr_desc
- is a dict of metadata. Currently contains 'type' with the
- msrest type and 'key' with the RestAPI encoded key.
- Value is the current value in this object.
-
- The string returned will be used to serialize the key.
- If the return type is a list, this is considered hierarchical
- result dict.
-
- See the three examples in this file:
-
- - attribute_transformer
- - full_restapi_key_transformer
- - last_restapi_key_transformer
-
- If you want XML serialization, you can pass the kwargs is_xml=True.
-
- :param bool keep_readonly: If you want to serialize the readonly attributes
- :param function key_transformer: A key transformer function.
- :returns: A dict JSON compatible object
- :rtype: dict
- """
- serializer = Serializer(self._infer_class_models())
- return serializer._serialize( # type: ignore # pylint: disable=protected-access
- self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
- )
-
- @classmethod
- def _infer_class_models(cls):
- try:
- str_models = cls.__module__.rsplit(".", 1)[0]
- models = sys.modules[str_models]
- client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
- if cls.__name__ not in client_models:
- raise ValueError("Not Autorest generated code")
- except Exception: # pylint: disable=broad-exception-caught
- # Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
- client_models = {cls.__name__: cls}
- return client_models
-
- @classmethod
- def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self:
- """Parse a str using the RestAPI syntax and return a model.
-
- :param str data: A str using RestAPI structure. JSON by default.
- :param str content_type: JSON by default, set application/xml if XML.
- :returns: An instance of this model
- :raises DeserializationError: if something went wrong
- :rtype: Self
- """
- deserializer = Deserializer(cls._infer_class_models())
- return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
-
- @classmethod
- def from_dict(
- cls,
- data: Any,
- key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None,
- content_type: Optional[str] = None,
- ) -> Self:
- """Parse a dict using given key extractor return a model.
-
- By default consider key
- extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor
- and last_rest_key_case_insensitive_extractor)
-
- :param dict data: A dict using RestAPI structure
- :param function key_extractors: A key extractor function.
- :param str content_type: JSON by default, set application/xml if XML.
- :returns: An instance of this model
- :raises: DeserializationError if something went wrong
- :rtype: Self
- """
- deserializer = Deserializer(cls._infer_class_models())
- deserializer.key_extractors = ( # type: ignore
- [ # type: ignore
- attribute_key_case_insensitive_extractor,
- rest_key_case_insensitive_extractor,
- last_rest_key_case_insensitive_extractor,
- ]
- if key_extractors is None
- else key_extractors
- )
- return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
-
- @classmethod
- def _flatten_subtype(cls, key, objects):
- if "_subtype_map" not in cls.__dict__:
- return {}
- result = dict(cls._subtype_map[key])
- for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
- return result
-
- @classmethod
- def _classify(cls, response, objects):
- """Check the class _subtype_map for any child classes.
- We want to ignore any inherited _subtype_maps.
-
- :param dict response: The initial data
- :param dict objects: The class objects
- :returns: The class to be used
- :rtype: class
- """
- for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
- subtype_value = None
-
- if not isinstance(response, ET.Element):
- rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
- else:
- subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
- if subtype_value:
- # Try to match base class. Can be class name only
- # (bug to fix in Autorest to support x-ms-discriminator-name)
- if cls.__name__ == subtype_value:
- return cls
- flatten_mapping_type = cls._flatten_subtype(subtype_key, objects)
- try:
- return objects[flatten_mapping_type[subtype_value]] # type: ignore
- except KeyError:
- _LOGGER.warning(
- "Subtype value %s has no mapping, use base class %s.",
- subtype_value,
- cls.__name__,
- )
- break
- else:
- _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__)
- break
- return cls
-
- @classmethod
- def _get_rest_key_parts(cls, attr_key):
- """Get the RestAPI key of this attr, split it and decode part
- :param str attr_key: Attribute key must be in attribute_map.
- :returns: A list of RestAPI part
- :rtype: list
- """
- rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"])
- return [_decode_attribute_map_key(key_part) for key_part in rest_split_key]
-
-
-def _decode_attribute_map_key(key):
- """This decode a key in an _attribute_map to the actual key we want to look at
- inside the received data.
-
- :param str key: A key string from the generated code
- :returns: The decoded key
- :rtype: str
- """
- return key.replace("\\.", ".")
-
-
-class Serializer: # pylint: disable=too-many-public-methods
- """Request object model serializer."""
-
- basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
-
- _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()}
- days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"}
- months = {
- 1: "Jan",
- 2: "Feb",
- 3: "Mar",
- 4: "Apr",
- 5: "May",
- 6: "Jun",
- 7: "Jul",
- 8: "Aug",
- 9: "Sep",
- 10: "Oct",
- 11: "Nov",
- 12: "Dec",
- }
- validation = {
- "min_length": lambda x, y: len(x) < y,
- "max_length": lambda x, y: len(x) > y,
- "minimum": lambda x, y: x < y,
- "maximum": lambda x, y: x > y,
- "minimum_ex": lambda x, y: x <= y,
- "maximum_ex": lambda x, y: x >= y,
- "min_items": lambda x, y: len(x) < y,
- "max_items": lambda x, y: len(x) > y,
- "pattern": lambda x, y: not re.match(y, x, re.UNICODE),
- "unique": lambda x, y: len(x) != len(set(x)),
- "multiple": lambda x, y: x % y != 0,
- }
-
- def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
- self.serialize_type = {
- "iso-8601": Serializer.serialize_iso,
- "rfc-1123": Serializer.serialize_rfc,
- "unix-time": Serializer.serialize_unix,
- "duration": Serializer.serialize_duration,
- "date": Serializer.serialize_date,
- "time": Serializer.serialize_time,
- "decimal": Serializer.serialize_decimal,
- "long": Serializer.serialize_long,
- "bytearray": Serializer.serialize_bytearray,
- "base64": Serializer.serialize_base64,
- "object": self.serialize_object,
- "[]": self.serialize_iter,
- "{}": self.serialize_dict,
- }
- self.dependencies: Dict[str, type] = dict(classes) if classes else {}
- self.key_transformer = full_restapi_key_transformer
- self.client_side_validation = True
-
- def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
- self, target_obj, data_type=None, **kwargs
- ):
- """Serialize data into a string according to type.
-
- :param object target_obj: The data to be serialized.
- :param str data_type: The type to be serialized from.
- :rtype: str, dict
- :raises SerializationError: if serialization fails.
- :returns: The serialized data.
- """
- key_transformer = kwargs.get("key_transformer", self.key_transformer)
- keep_readonly = kwargs.get("keep_readonly", False)
- if target_obj is None:
- return None
-
- attr_name = None
- class_name = target_obj.__class__.__name__
-
- if data_type:
- return self.serialize_data(target_obj, data_type, **kwargs)
-
- if not hasattr(target_obj, "_attribute_map"):
- data_type = type(target_obj).__name__
- if data_type in self.basic_types.values():
- return self.serialize_data(target_obj, data_type, **kwargs)
-
- # Force "is_xml" kwargs if we detect a XML model
- try:
- is_xml_model_serialization = kwargs["is_xml"]
- except KeyError:
- is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model())
-
- serialized = {}
- if is_xml_model_serialization:
- serialized = target_obj._create_xml_node() # pylint: disable=protected-access
- try:
- attributes = target_obj._attribute_map # pylint: disable=protected-access
- for attr, attr_desc in attributes.items():
- attr_name = attr
- if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
- attr_name, {}
- ).get("readonly", False):
- continue
-
- if attr_name == "additional_properties" and attr_desc["key"] == "":
- if target_obj.additional_properties is not None:
- serialized.update(target_obj.additional_properties)
- continue
- try:
-
- orig_attr = getattr(target_obj, attr)
- if is_xml_model_serialization:
- pass # Don't provide "transformer" for XML for now. Keep "orig_attr"
- else: # JSON
- keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr)
- keys = keys if isinstance(keys, list) else [keys]
-
- kwargs["serialization_ctxt"] = attr_desc
- new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs)
-
- if is_xml_model_serialization:
- xml_desc = attr_desc.get("xml", {})
- xml_name = xml_desc.get("name", attr_desc["key"])
- xml_prefix = xml_desc.get("prefix", None)
- xml_ns = xml_desc.get("ns", None)
- if xml_desc.get("attr", False):
- if xml_ns:
- ET.register_namespace(xml_prefix, xml_ns)
- xml_name = "{{{}}}{}".format(xml_ns, xml_name)
- serialized.set(xml_name, new_attr) # type: ignore
- continue
- if xml_desc.get("text", False):
- serialized.text = new_attr # type: ignore
- continue
- if isinstance(new_attr, list):
- serialized.extend(new_attr) # type: ignore
- elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name,
- # we MUST replace the tag with the local tag. But keeping the namespaces.
- if "name" not in getattr(orig_attr, "_xml_map", {}):
- splitted_tag = new_attr.tag.split("}")
- if len(splitted_tag) == 2: # Namespace
- new_attr.tag = "}".join([splitted_tag[0], xml_name])
- else:
- new_attr.tag = xml_name
- serialized.append(new_attr) # type: ignore
- else: # That's a basic type
- # Integrate namespace if necessary
- local_node = _create_xml_node(xml_name, xml_prefix, xml_ns)
- local_node.text = str(new_attr)
- serialized.append(local_node) # type: ignore
- else: # JSON
- for k in reversed(keys): # type: ignore
- new_attr = {k: new_attr}
-
- _new_attr = new_attr
- _serialized = serialized
- for k in keys: # type: ignore
- if k not in _serialized:
- _serialized.update(_new_attr) # type: ignore
- _new_attr = _new_attr[k] # type: ignore
- _serialized = _serialized[k]
- except ValueError as err:
- if isinstance(err, SerializationError):
- raise
-
- except (AttributeError, KeyError, TypeError) as err:
- msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
- raise SerializationError(msg) from err
- return serialized
-
- def body(self, data, data_type, **kwargs):
- """Serialize data intended for a request body.
-
- :param object data: The data to be serialized.
- :param str data_type: The type to be serialized from.
- :rtype: dict
- :raises SerializationError: if serialization fails.
- :raises ValueError: if data is None
- :returns: The serialized request body
- """
-
- # Just in case this is a dict
- internal_data_type_str = data_type.strip("[]{}")
- internal_data_type = self.dependencies.get(internal_data_type_str, None)
- try:
- is_xml_model_serialization = kwargs["is_xml"]
- except KeyError:
- if internal_data_type and issubclass(internal_data_type, Model):
- is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model())
- else:
- is_xml_model_serialization = False
- if internal_data_type and not isinstance(internal_data_type, Enum):
- try:
- deserializer = Deserializer(self.dependencies)
- # Since it's on serialization, it's almost sure that format is not JSON REST
- # We're not able to deal with additional properties for now.
- deserializer.additional_properties_detection = False
- if is_xml_model_serialization:
- deserializer.key_extractors = [ # type: ignore
- attribute_key_case_insensitive_extractor,
- ]
- else:
- deserializer.key_extractors = [
- rest_key_case_insensitive_extractor,
- attribute_key_case_insensitive_extractor,
- last_rest_key_case_insensitive_extractor,
- ]
- data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
- except DeserializationError as err:
- raise SerializationError("Unable to build a model: " + str(err)) from err
-
- return self._serialize(data, data_type, **kwargs)
-
- def url(self, name, data, data_type, **kwargs):
- """Serialize data intended for a URL path.
-
- :param str name: The name of the URL path parameter.
- :param object data: The data to be serialized.
- :param str data_type: The type to be serialized from.
- :rtype: str
- :returns: The serialized URL path
- :raises TypeError: if serialization fails.
- :raises ValueError: if data is None
- """
- try:
- output = self.serialize_data(data, data_type, **kwargs)
- if data_type == "bool":
- output = json.dumps(output)
-
- if kwargs.get("skip_quote") is True:
- output = str(output)
- output = output.replace("{", quote("{")).replace("}", quote("}"))
- else:
- output = quote(str(output), safe="")
- except SerializationError as exc:
- raise TypeError("{} must be type {}.".format(name, data_type)) from exc
- return output
-
- def query(self, name, data, data_type, **kwargs):
- """Serialize data intended for a URL query.
-
- :param str name: The name of the query parameter.
- :param object data: The data to be serialized.
- :param str data_type: The type to be serialized from.
- :rtype: str, list
- :raises TypeError: if serialization fails.
- :raises ValueError: if data is None
- :returns: The serialized query parameter
- """
- try:
- # Treat the list aside, since we don't want to encode the div separator
- if data_type.startswith("["):
- internal_data_type = data_type[1:-1]
- do_quote = not kwargs.get("skip_quote", False)
- return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)
-
- # Not a list, regular serialization
- output = self.serialize_data(data, data_type, **kwargs)
- if data_type == "bool":
- output = json.dumps(output)
- if kwargs.get("skip_quote") is True:
- output = str(output)
- else:
- output = quote(str(output), safe="")
- except SerializationError as exc:
- raise TypeError("{} must be type {}.".format(name, data_type)) from exc
- return str(output)
-
- def header(self, name, data, data_type, **kwargs):
- """Serialize data intended for a request header.
-
- :param str name: The name of the header.
- :param object data: The data to be serialized.
- :param str data_type: The type to be serialized from.
- :rtype: str
- :raises TypeError: if serialization fails.
- :raises ValueError: if data is None
- :returns: The serialized header
- """
- try:
- if data_type in ["[str]"]:
- data = ["" if d is None else d for d in data]
-
- output = self.serialize_data(data, data_type, **kwargs)
- if data_type == "bool":
- output = json.dumps(output)
- except SerializationError as exc:
- raise TypeError("{} must be type {}.".format(name, data_type)) from exc
- return str(output)
-
- def serialize_data(self, data, data_type, **kwargs):
- """Serialize generic data according to supplied data type.
-
- :param object data: The data to be serialized.
- :param str data_type: The type to be serialized from.
- :raises AttributeError: if required data is None.
- :raises ValueError: if data is None
- :raises SerializationError: if serialization fails.
- :returns: The serialized data.
- :rtype: str, int, float, bool, dict, list
- """
- if data is None:
- raise ValueError("No value for given attribute")
-
- try:
- if data is CoreNull:
- return None
- if data_type in self.basic_types.values():
- return self.serialize_basic(data, data_type, **kwargs)
-
- if data_type in self.serialize_type:
- return self.serialize_type[data_type](data, **kwargs)
-
- # If dependencies is empty, try with current data class
- # It has to be a subclass of Enum anyway
- enum_type = self.dependencies.get(data_type, data.__class__)
- if issubclass(enum_type, Enum):
- return Serializer.serialize_enum(data, enum_obj=enum_type)
-
- iter_type = data_type[0] + data_type[-1]
- if iter_type in self.serialize_type:
- return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs)
-
- except (ValueError, TypeError) as err:
- msg = "Unable to serialize value: {!r} as type: {!r}."
- raise SerializationError(msg.format(data, data_type)) from err
- return self._serialize(data, **kwargs)
-
- @classmethod
- def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
- custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
- if custom_serializer:
- return custom_serializer
- if kwargs.get("is_xml", False):
- return cls._xml_basic_types_serializers.get(data_type)
-
- @classmethod
- def serialize_basic(cls, data, data_type, **kwargs):
- """Serialize basic builting data type.
- Serializes objects to str, int, float or bool.
-
- Possible kwargs:
- - basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- - is_xml bool : If set, use xml_basic_types_serializers
-
- :param obj data: Object to be serialized.
- :param str data_type: Type of object in the iterable.
- :rtype: str, int, float, bool
- :return: serialized object
- """
- custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
- if custom_serializer:
- return custom_serializer(data)
- if data_type == "str":
- return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec # pylint: disable=eval-used
-
- @classmethod
- def serialize_unicode(cls, data):
- """Special handling for serializing unicode strings in Py2.
- Encode to UTF-8 if unicode, otherwise handle as a str.
-
- :param str data: Object to be serialized.
- :rtype: str
- :return: serialized object
- """
- try: # If I received an enum, return its value
- return data.value
- except AttributeError:
- pass
-
- try:
- if isinstance(data, unicode): # type: ignore
- # Don't change it, JSON and XML ElementTree are totally able
- # to serialize correctly u'' strings
- return data
- except NameError:
- return str(data)
- return str(data)
-
- def serialize_iter(self, data, iter_type, div=None, **kwargs):
- """Serialize iterable.
-
- Supported kwargs:
- - serialization_ctxt dict : The current entry of _attribute_map, or same format.
- serialization_ctxt['type'] should be same as data_type.
- - is_xml bool : If set, serialize as XML
-
- :param list data: Object to be serialized.
- :param str iter_type: Type of object in the iterable.
- :param str div: If set, this str will be used to combine the elements
- in the iterable into a combined string. Default is 'None'.
- Defaults to False.
- :rtype: list, str
- :return: serialized iterable
- """
- if isinstance(data, str):
- raise SerializationError("Refuse str type as a valid iter type.")
-
- serialization_ctxt = kwargs.get("serialization_ctxt", {})
- is_xml = kwargs.get("is_xml", False)
-
- serialized = []
- for d in data:
- try:
- serialized.append(self.serialize_data(d, iter_type, **kwargs))
- except ValueError as err:
- if isinstance(err, SerializationError):
- raise
- serialized.append(None)
-
- if kwargs.get("do_quote", False):
- serialized = ["" if s is None else quote(str(s), safe="") for s in serialized]
-
- if div:
- serialized = ["" if s is None else str(s) for s in serialized]
- serialized = div.join(serialized)
-
- if "xml" in serialization_ctxt or is_xml:
- # XML serialization is more complicated
- xml_desc = serialization_ctxt.get("xml", {})
- xml_name = xml_desc.get("name")
- if not xml_name:
- xml_name = serialization_ctxt["key"]
-
- # Create a wrap node if necessary (use the fact that Element and list have "append")
- is_wrapped = xml_desc.get("wrapped", False)
- node_name = xml_desc.get("itemsName", xml_name)
- if is_wrapped:
- final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
- else:
- final_result = []
- # All list elements to "local_node"
- for el in serialized:
- if isinstance(el, ET.Element):
- el_node = el
- else:
- el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
- if el is not None: # Otherwise it writes "None" :-p
- el_node.text = str(el)
- final_result.append(el_node)
- return final_result
- return serialized
-
- def serialize_dict(self, attr, dict_type, **kwargs):
- """Serialize a dictionary of objects.
-
- :param dict attr: Object to be serialized.
- :param str dict_type: Type of object in the dictionary.
- :rtype: dict
- :return: serialized dictionary
- """
- serialization_ctxt = kwargs.get("serialization_ctxt", {})
- serialized = {}
- for key, value in attr.items():
- try:
- serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs)
- except ValueError as err:
- if isinstance(err, SerializationError):
- raise
- serialized[self.serialize_unicode(key)] = None
-
- if "xml" in serialization_ctxt:
- # XML serialization is more complicated
- xml_desc = serialization_ctxt["xml"]
- xml_name = xml_desc["name"]
-
- final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
- for key, value in serialized.items():
- ET.SubElement(final_result, key).text = value
- return final_result
-
- return serialized
-
- def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
- """Serialize a generic object.
- This will be handled as a dictionary. If object passed in is not
- a basic type (str, int, float, dict, list) it will simply be
- cast to str.
-
- :param dict attr: Object to be serialized.
- :rtype: dict or str
- :return: serialized object
- """
- if attr is None:
- return None
- if isinstance(attr, ET.Element):
- return attr
- obj_type = type(attr)
- if obj_type in self.basic_types:
- return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs)
- if obj_type is _long_type:
- return self.serialize_long(attr)
- if obj_type is str:
- return self.serialize_unicode(attr)
- if obj_type is datetime.datetime:
- return self.serialize_iso(attr)
- if obj_type is datetime.date:
- return self.serialize_date(attr)
- if obj_type is datetime.time:
- return self.serialize_time(attr)
- if obj_type is datetime.timedelta:
- return self.serialize_duration(attr)
- if obj_type is decimal.Decimal:
- return self.serialize_decimal(attr)
-
- # If it's a model or I know this dependency, serialize as a Model
- if obj_type in self.dependencies.values() or isinstance(attr, Model):
- return self._serialize(attr)
-
- if obj_type == dict:
- serialized = {}
- for key, value in attr.items():
- try:
- serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs)
- except ValueError:
- serialized[self.serialize_unicode(key)] = None
- return serialized
-
- if obj_type == list:
- serialized = []
- for obj in attr:
- try:
- serialized.append(self.serialize_object(obj, **kwargs))
- except ValueError:
- pass
- return serialized
- return str(attr)
-
- @staticmethod
- def serialize_enum(attr, enum_obj=None):
- try:
- result = attr.value
- except AttributeError:
- result = attr
- try:
- enum_obj(result) # type: ignore
- return result
- except ValueError as exc:
- for enum_value in enum_obj: # type: ignore
- if enum_value.value.lower() == str(attr).lower():
- return enum_value.value
- error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj)) from exc
-
- @staticmethod
- def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize bytearray into base-64 string.
-
- :param str attr: Object to be serialized.
- :rtype: str
- :return: serialized base64
- """
- return b64encode(attr).decode()
-
- @staticmethod
- def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize str into base-64 string.
-
- :param str attr: Object to be serialized.
- :rtype: str
- :return: serialized base64
- """
- encoded = b64encode(attr).decode("ascii")
- return encoded.strip("=").replace("+", "-").replace("/", "_")
-
- @staticmethod
- def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize Decimal object to float.
-
- :param decimal attr: Object to be serialized.
- :rtype: float
- :return: serialized decimal
- """
- return float(attr)
-
- @staticmethod
- def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize long (Py2) or int (Py3).
-
- :param int attr: Object to be serialized.
- :rtype: int/long
- :return: serialized long
- """
- return _long_type(attr)
-
- @staticmethod
- def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize Date object into ISO-8601 formatted string.
-
- :param Date attr: Object to be serialized.
- :rtype: str
- :return: serialized date
- """
- if isinstance(attr, str):
- attr = isodate.parse_date(attr)
- t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day)
- return t
-
- @staticmethod
- def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize Time object into ISO-8601 formatted string.
-
- :param datetime.time attr: Object to be serialized.
- :rtype: str
- :return: serialized time
- """
- if isinstance(attr, str):
- attr = isodate.parse_time(attr)
- t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second)
- if attr.microsecond:
- t += ".{:02}".format(attr.microsecond)
- return t
-
- @staticmethod
- def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize TimeDelta object into ISO-8601 formatted string.
-
- :param TimeDelta attr: Object to be serialized.
- :rtype: str
- :return: serialized duration
- """
- if isinstance(attr, str):
- attr = isodate.parse_duration(attr)
- return isodate.duration_isoformat(attr)
-
- @staticmethod
- def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize Datetime object into RFC-1123 formatted string.
-
- :param Datetime attr: Object to be serialized.
- :rtype: str
- :raises TypeError: if format invalid.
- :return: serialized rfc
- """
- try:
- if not attr.tzinfo:
- _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
- utc = attr.utctimetuple()
- except AttributeError as exc:
- raise TypeError("RFC1123 object must be valid Datetime object.") from exc
-
- return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
- Serializer.days[utc.tm_wday],
- utc.tm_mday,
- Serializer.months[utc.tm_mon],
- utc.tm_year,
- utc.tm_hour,
- utc.tm_min,
- utc.tm_sec,
- )
-
- @staticmethod
- def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize Datetime object into ISO-8601 formatted string.
-
- :param Datetime attr: Object to be serialized.
- :rtype: str
- :raises SerializationError: if format invalid.
- :return: serialized iso
- """
- if isinstance(attr, str):
- attr = isodate.parse_datetime(attr)
- try:
- if not attr.tzinfo:
- _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
- utc = attr.utctimetuple()
- if utc.tm_year > 9999 or utc.tm_year < 1:
- raise OverflowError("Hit max or min date")
-
- microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
- if microseconds:
- microseconds = "." + microseconds
- date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
- utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
- )
- return date + microseconds + "Z"
- except (ValueError, OverflowError) as err:
- msg = "Unable to serialize datetime object."
- raise SerializationError(msg) from err
- except AttributeError as err:
- msg = "ISO-8601 object must be valid Datetime object."
- raise TypeError(msg) from err
-
- @staticmethod
- def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
- """Serialize Datetime object into IntTime format.
- This is represented as seconds.
-
- :param Datetime attr: Object to be serialized.
- :rtype: int
- :raises SerializationError: if format invalid
- :return: serialied unix
- """
- if isinstance(attr, int):
- return attr
- try:
- if not attr.tzinfo:
- _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
- return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError as exc:
- raise TypeError("Unix time object must be valid Datetime object.") from exc
-
-
-def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
- key = attr_desc["key"]
- working_data = data
-
- while "." in key:
- # Need the cast, as for some reasons "split" is typed as list[str | Any]
- dict_keys = cast(List[str], _FLATTEN.split(key))
- if len(dict_keys) == 1:
- key = _decode_attribute_map_key(dict_keys[0])
- break
- working_key = _decode_attribute_map_key(dict_keys[0])
- working_data = working_data.get(working_key, data)
- if working_data is None:
- # If at any point while following flatten JSON path see None, it means
- # that all properties under are None as well
- return None
- key = ".".join(dict_keys[1:])
-
- return working_data.get(key)
-
-
-def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
- attr, attr_desc, data
-):
- key = attr_desc["key"]
- working_data = data
-
- while "." in key:
- dict_keys = _FLATTEN.split(key)
- if len(dict_keys) == 1:
- key = _decode_attribute_map_key(dict_keys[0])
- break
- working_key = _decode_attribute_map_key(dict_keys[0])
- working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data)
- if working_data is None:
- # If at any point while following flatten JSON path see None, it means
- # that all properties under are None as well
- return None
- key = ".".join(dict_keys[1:])
-
- if working_data:
- return attribute_key_case_insensitive_extractor(key, None, working_data)
-
-
-def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
- """Extract the attribute in "data" based on the last part of the JSON path key.
-
- :param str attr: The attribute to extract
- :param dict attr_desc: The attribute description
- :param dict data: The data to extract from
- :rtype: object
- :returns: The extracted attribute
- """
- key = attr_desc["key"]
- dict_keys = _FLATTEN.split(key)
- return attribute_key_extractor(dict_keys[-1], None, data)
-
-
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
- """Extract the attribute in "data" based on the last part of the JSON path key.
-
- This is the case insensitive version of "last_rest_key_extractor"
- :param str attr: The attribute to extract
- :param dict attr_desc: The attribute description
- :param dict data: The data to extract from
- :rtype: object
- :returns: The extracted attribute
- """
- key = attr_desc["key"]
- dict_keys = _FLATTEN.split(key)
- return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data)
-
-
-def attribute_key_extractor(attr, _, data):
- return data.get(attr)
-
-
-def attribute_key_case_insensitive_extractor(attr, _, data):
- found_key = None
- lower_attr = attr.lower()
- for key in data:
- if lower_attr == key.lower():
- found_key = key
- break
-
- return data.get(found_key)
-
-
-def _extract_name_from_internal_type(internal_type):
- """Given an internal type XML description, extract correct XML name with namespace.
-
- :param dict internal_type: An model type
- :rtype: tuple
- :returns: A tuple XML name + namespace dict
- """
- internal_type_xml_map = getattr(internal_type, "_xml_map", {})
- xml_name = internal_type_xml_map.get("name", internal_type.__name__)
- xml_ns = internal_type_xml_map.get("ns", None)
- if xml_ns:
- xml_name = "{{{}}}{}".format(xml_ns, xml_name)
- return xml_name
-
-
-def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
- if isinstance(data, dict):
- return None
-
- # Test if this model is XML ready first
- if not isinstance(data, ET.Element):
- return None
-
- xml_desc = attr_desc.get("xml", {})
- xml_name = xml_desc.get("name", attr_desc["key"])
-
- # Look for a children
- is_iter_type = attr_desc["type"].startswith("[")
- is_wrapped = xml_desc.get("wrapped", False)
- internal_type = attr_desc.get("internalType", None)
- internal_type_xml_map = getattr(internal_type, "_xml_map", {})
-
- # Integrate namespace if necessary
- xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
- if xml_ns:
- xml_name = "{{{}}}{}".format(xml_ns, xml_name)
-
- # If it's an attribute, that's simple
- if xml_desc.get("attr", False):
- return data.get(xml_name)
-
- # If it's x-ms-text, that's simple too
- if xml_desc.get("text", False):
- return data.text
-
- # Scenario where I take the local name:
- # - Wrapped node
- # - Internal type is an enum (considered basic types)
- # - Internal type has no XML/Name node
- if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
- children = data.findall(xml_name)
- # If internal type has a local name and it's not a list, I use that name
- elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
- xml_name = _extract_name_from_internal_type(internal_type)
- children = data.findall(xml_name)
- # That's an array
- else:
- if internal_type: # Complex type, ignore itemsName and use the complex type name
- items_name = _extract_name_from_internal_type(internal_type)
- else:
- items_name = xml_desc.get("itemsName", xml_name)
- children = data.findall(items_name)
-
- if len(children) == 0:
- if is_iter_type:
- if is_wrapped:
- return None # is_wrapped no node, we want None
- return [] # not wrapped, assume empty list
- return None # Assume it's not there, maybe an optional node.
-
- # If is_iter_type and not wrapped, return all found children
- if is_iter_type:
- if not is_wrapped:
- return children
- # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
- xml_name
- )
- )
- return list(children[0]) # Might be empty list and that's ok.
-
- # Here it's not a itertype, we should have found one element only or empty
- if len(children) > 1:
- raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name))
- return children[0]
-
-
-class Deserializer:
- """Response object model deserializer.
-
- :param dict classes: Class type dictionary for deserializing complex types.
- :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
- """
-
- basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
-
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
-
- def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
- self.deserialize_type = {
- "iso-8601": Deserializer.deserialize_iso,
- "rfc-1123": Deserializer.deserialize_rfc,
- "unix-time": Deserializer.deserialize_unix,
- "duration": Deserializer.deserialize_duration,
- "date": Deserializer.deserialize_date,
- "time": Deserializer.deserialize_time,
- "decimal": Deserializer.deserialize_decimal,
- "long": Deserializer.deserialize_long,
- "bytearray": Deserializer.deserialize_bytearray,
- "base64": Deserializer.deserialize_base64,
- "object": self.deserialize_object,
- "[]": self.deserialize_iter,
- "{}": self.deserialize_dict,
- }
- self.deserialize_expected_types = {
- "duration": (isodate.Duration, datetime.timedelta),
- "iso-8601": (datetime.datetime),
- }
- self.dependencies: Dict[str, type] = dict(classes) if classes else {}
- self.key_extractors = [rest_key_extractor, xml_key_extractor]
- # Additional properties only works if the "rest_key_extractor" is used to
- # extract the keys. Making it to work whatever the key extractor is too much
- # complicated, with no real scenario for now.
- # So adding a flag to disable additional properties detection. This flag should be
- # used if your expect the deserialization to NOT come from a JSON REST syntax.
- # Otherwise, result are unexpected
- self.additional_properties_detection = True
-
- def __call__(self, target_obj, response_data, content_type=None):
- """Call the deserializer to process a REST response.
-
- :param str target_obj: Target data type to deserialize to.
- :param requests.Response response_data: REST response object.
- :param str content_type: Swagger "produces" if available.
- :raises DeserializationError: if deserialization fails.
- :return: Deserialized object.
- :rtype: object
- """
- data = self._unpack_content(response_data, content_type)
- return self._deserialize(target_obj, data)
-
- def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
- """Call the deserializer on a model.
-
- Data needs to be already deserialized as JSON or XML ElementTree
-
- :param str target_obj: Target data type to deserialize to.
- :param object data: Object to deserialize.
- :raises DeserializationError: if deserialization fails.
- :return: Deserialized object.
- :rtype: object
- """
- # This is already a model, go recursive just in case
- if hasattr(data, "_attribute_map"):
- constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
- try:
- for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
- if attr in constants:
- continue
- value = getattr(data, attr)
- if value is None:
- continue
- local_type = mapconfig["type"]
- internal_data_type = local_type.strip("[]{}")
- if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum):
- continue
- setattr(data, attr, self._deserialize(local_type, value))
- return data
- except AttributeError:
- return
-
- response, class_name = self._classify_target(target_obj, data)
-
- if isinstance(response, str):
- return self.deserialize_data(data, response)
- if isinstance(response, type) and issubclass(response, Enum):
- return self.deserialize_enum(data, response)
-
- if data is None or data is CoreNull:
- return data
- try:
- attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
- d_attrs = {}
- for attr, attr_desc in attributes.items():
- # Check empty string. If it's not empty, someone has a real "additionalProperties"...
- if attr == "additional_properties" and attr_desc["key"] == "":
- continue
- raw_value = None
- # Enhance attr_desc with some dynamic data
- attr_desc = attr_desc.copy() # Do a copy, do not change the real one
- internal_data_type = attr_desc["type"].strip("[]{}")
- if internal_data_type in self.dependencies:
- attr_desc["internalType"] = self.dependencies[internal_data_type]
-
- for key_extractor in self.key_extractors:
- found_value = key_extractor(attr, attr_desc, data)
- if found_value is not None:
- if raw_value is not None and raw_value != found_value:
- msg = (
- "Ignoring extracted value '%s' from %s for key '%s'"
- " (duplicate extraction, follow extractors order)"
- )
- _LOGGER.warning(msg, found_value, key_extractor, attr)
- continue
- raw_value = found_value
-
- value = self.deserialize_data(raw_value, attr_desc["type"])
- d_attrs[attr] = value
- except (AttributeError, TypeError, KeyError) as err:
- msg = "Unable to deserialize to object: " + class_name # type: ignore
- raise DeserializationError(msg) from err
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
-
- def _build_additional_properties(self, attribute_map, data):
- if not self.additional_properties_detection:
- return None
- if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "":
- # Check empty string. If it's not empty, someone has a real "additionalProperties"
- return None
- if isinstance(data, ET.Element):
- data = {el.tag: el.text for el in data}
-
- known_keys = {
- _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0])
- for desc in attribute_map.values()
- if desc["key"] != ""
- }
- present_keys = set(data.keys())
- missing_keys = present_keys - known_keys
- return {key: data[key] for key in missing_keys}
-
- def _classify_target(self, target, data):
- """Check to see whether the deserialization target object can
- be classified into a subclass.
- Once classification has been determined, initialize object.
-
- :param str target: The target object type to deserialize to.
- :param str/dict data: The response data to deserialize.
- :return: The classified target object and its class name.
- :rtype: tuple
- """
- if target is None:
- return None, None
-
- if isinstance(target, str):
- try:
- target = self.dependencies[target]
- except KeyError:
- return target, target
-
- try:
- target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
- except AttributeError:
- pass # Target is not a Model, no classify
- return target, target.__class__.__name__ # type: ignore
-
- def failsafe_deserialize(self, target_obj, data, content_type=None):
- """Ignores any errors encountered in deserialization,
- and falls back to not deserializing the object. Recommended
- for use in error deserialization, as we want to return the
- HttpResponseError to users, and not have them deal with
- a deserialization error.
-
- :param str target_obj: The target object type to deserialize to.
- :param str/dict data: The response data to deserialize.
- :param str content_type: Swagger "produces" if available.
- :return: Deserialized object.
- :rtype: object
- """
- try:
- return self(target_obj, data, content_type=content_type)
- except: # pylint: disable=bare-except
- _LOGGER.debug(
- "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
- )
- return None
-
- @staticmethod
- def _unpack_content(raw_data, content_type=None):
- """Extract the correct structure for deserialization.
-
- If raw_data is a PipelineResponse, try to extract the result of RawDeserializer.
- if we can't, raise. Your Pipeline should have a RawDeserializer.
-
- If not a pipeline response and raw_data is bytes or string, use content-type
- to decode it. If no content-type, try JSON.
-
- If raw_data is something else, bypass all logic and return it directly.
-
- :param obj raw_data: Data to be processed.
- :param str content_type: How to parse if raw_data is a string/bytes.
- :raises JSONDecodeError: If JSON is requested and parsing is impossible.
- :raises UnicodeDecodeError: If bytes is not UTF8
- :rtype: object
- :return: Unpacked content.
- """
- # Assume this is enough to detect a Pipeline Response without importing it
- context = getattr(raw_data, "context", {})
- if context:
- if RawDeserializer.CONTEXT_NAME in context:
- return context[RawDeserializer.CONTEXT_NAME]
- raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize")
-
- # Assume this is enough to recognize universal_http.ClientResponse without importing it
- if hasattr(raw_data, "body"):
- return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers)
-
- # Assume this enough to recognize requests.Response without importing it.
- if hasattr(raw_data, "_content_consumed"):
- return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
-
- if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
- return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore
- return raw_data
-
- def _instantiate_model(self, response, attrs, additional_properties=None):
- """Instantiate a response model passing in deserialized args.
-
- :param Response response: The response model class.
- :param dict attrs: The deserialized response attributes.
- :param dict additional_properties: Additional properties to be set.
- :rtype: Response
- :return: The instantiated response model.
- """
- if callable(response):
- subtype = getattr(response, "_subtype_map", {})
- try:
- readonly = [
- k
- for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
- if v.get("readonly")
- ]
- const = [
- k
- for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
- if v.get("constant")
- ]
- kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
- response_obj = response(**kwargs)
- for attr in readonly:
- setattr(response_obj, attr, attrs.get(attr))
- if additional_properties:
- response_obj.additional_properties = additional_properties # type: ignore
- return response_obj
- except TypeError as err:
- msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err)) from err
- else:
- try:
- for attr, value in attrs.items():
- setattr(response, attr, value)
- return response
- except Exception as exp:
- msg = "Unable to populate response model. "
- msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg) from exp
-
- def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
- """Process data for deserialization according to data type.
-
- :param str data: The response string to be deserialized.
- :param str data_type: The type to deserialize to.
- :raises DeserializationError: if deserialization fails.
- :return: Deserialized object.
- :rtype: object
- """
- if data is None:
- return data
-
- try:
- if not data_type:
- return data
- if data_type in self.basic_types.values():
- return self.deserialize_basic(data, data_type)
- if data_type in self.deserialize_type:
- if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
- return data
-
- is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
- "object",
- "[]",
- r"{}",
- ]
- if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
- return None
- data_val = self.deserialize_type[data_type](data)
- return data_val
-
- iter_type = data_type[0] + data_type[-1]
- if iter_type in self.deserialize_type:
- return self.deserialize_type[iter_type](data, data_type[1:-1])
-
- obj_type = self.dependencies[data_type]
- if issubclass(obj_type, Enum):
- if isinstance(data, ET.Element):
- data = data.text
- return self.deserialize_enum(data, obj_type)
-
- except (ValueError, TypeError, AttributeError) as err:
- msg = "Unable to deserialize response data."
- msg += " Data: {}, {}".format(data, data_type)
- raise DeserializationError(msg) from err
- return self._deserialize(obj_type, data)
-
- def deserialize_iter(self, attr, iter_type):
- """Deserialize an iterable.
-
- :param list attr: Iterable to be deserialized.
- :param str iter_type: The type of object in the iterable.
- :return: Deserialized iterable.
- :rtype: list
- """
- if attr is None:
- return None
- if isinstance(attr, ET.Element): # If I receive an element here, get the children
- attr = list(attr)
- if not isinstance(attr, (list, set)):
- raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr)))
- return [self.deserialize_data(a, iter_type) for a in attr]
-
- def deserialize_dict(self, attr, dict_type):
- """Deserialize a dictionary.
-
- :param dict/list attr: Dictionary to be deserialized. Also accepts
- a list of key, value pairs.
- :param str dict_type: The object type of the items in the dictionary.
- :return: Deserialized dictionary.
- :rtype: dict
- """
- if isinstance(attr, list):
- return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr}
-
- if isinstance(attr, ET.Element):
- # Transform value into {"Key": "value"}
- attr = {el.tag: el.text for el in attr}
- return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
-
- def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
- """Deserialize a generic object.
- This will be handled as a dictionary.
-
- :param dict attr: Dictionary to be deserialized.
- :return: Deserialized object.
- :rtype: dict
- :raises TypeError: if non-builtin datatype encountered.
- """
- if attr is None:
- return None
- if isinstance(attr, ET.Element):
- # Do no recurse on XML, just return the tree as-is
- return attr
- if isinstance(attr, str):
- return self.deserialize_basic(attr, "str")
- obj_type = type(attr)
- if obj_type in self.basic_types:
- return self.deserialize_basic(attr, self.basic_types[obj_type])
- if obj_type is _long_type:
- return self.deserialize_long(attr)
-
- if obj_type == dict:
- deserialized = {}
- for key, value in attr.items():
- try:
- deserialized[key] = self.deserialize_object(value, **kwargs)
- except ValueError:
- deserialized[key] = None
- return deserialized
-
- if obj_type == list:
- deserialized = []
- for obj in attr:
- try:
- deserialized.append(self.deserialize_object(obj, **kwargs))
- except ValueError:
- pass
- return deserialized
-
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
-
- def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
- """Deserialize basic builtin data type from string.
- Will attempt to convert to str, int, float and bool.
- This function will also accept '1', '0', 'true' and 'false' as
- valid bool values.
-
- :param str attr: response string to be deserialized.
- :param str data_type: deserialization data type.
- :return: Deserialized basic type.
- :rtype: str, int, float or bool
- :raises TypeError: if string format is not valid.
- """
- # If we're here, data is supposed to be a basic type.
- # If it's still an XML node, take the text
- if isinstance(attr, ET.Element):
- attr = attr.text
- if not attr:
- if data_type == "str":
- # None or '', node is empty string.
- return ""
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
-
- if data_type == "bool":
- if attr in [True, False, 1, 0]:
- return bool(attr)
- if isinstance(attr, str):
- if attr.lower() in ["true", "1"]:
- return True
- if attr.lower() in ["false", "0"]:
- return False
- raise TypeError("Invalid boolean value: {}".format(attr))
-
- if data_type == "str":
- return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec # pylint: disable=eval-used
-
- @staticmethod
- def deserialize_unicode(data):
- """Preserve unicode objects in Python 2, otherwise return data
- as a string.
-
- :param str data: response string to be deserialized.
- :return: Deserialized string.
- :rtype: str or unicode
- """
- # We might be here because we have an enum modeled as string,
- # and we try to deserialize a partial dict with enum inside
- if isinstance(data, Enum):
- return data
-
- # Consider this is real string
- try:
- if isinstance(data, unicode): # type: ignore
- return data
- except NameError:
- return str(data)
- return str(data)
-
- @staticmethod
- def deserialize_enum(data, enum_obj):
- """Deserialize string into enum object.
-
- If the string is not a valid enum value it will be returned as-is
- and a warning will be logged.
-
- :param str data: Response string to be deserialized. If this value is
- None or invalid it will be returned as-is.
- :param Enum enum_obj: Enum object to deserialize to.
- :return: Deserialized enum object.
- :rtype: Enum
- """
- if isinstance(data, enum_obj) or data is None:
- return data
- if isinstance(data, Enum):
- data = data.value
- if isinstance(data, int):
- # Workaround. We might consider remove it in the future.
- try:
- return list(enum_obj.__members__.values())[data]
- except IndexError as exc:
- error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj)) from exc
- try:
- return enum_obj(str(data))
- except ValueError:
- for enum_value in enum_obj:
- if enum_value.value.lower() == str(data).lower():
- return enum_value
- # We don't fail anymore for unknown value, we deserialize as a string
- _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj)
- return Deserializer.deserialize_unicode(data)
-
- @staticmethod
- def deserialize_bytearray(attr):
- """Deserialize string into bytearray.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized bytearray
- :rtype: bytearray
- :raises TypeError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- return bytearray(b64decode(attr)) # type: ignore
-
- @staticmethod
- def deserialize_base64(attr):
- """Deserialize base64 encoded string into string.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized base64 string
- :rtype: bytearray
- :raises TypeError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore
- attr = attr + padding # type: ignore
- encoded = attr.replace("-", "+").replace("_", "/")
- return b64decode(encoded)
-
- @staticmethod
- def deserialize_decimal(attr):
- """Deserialize string into Decimal object.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized decimal
- :raises DeserializationError: if string format invalid.
- :rtype: decimal
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- try:
- return decimal.Decimal(str(attr)) # type: ignore
- except decimal.DecimalException as err:
- msg = "Invalid decimal {}".format(attr)
- raise DeserializationError(msg) from err
-
- @staticmethod
- def deserialize_long(attr):
- """Deserialize string into long (Py2) or int (Py3).
-
- :param str attr: response string to be deserialized.
- :return: Deserialized int
- :rtype: long or int
- :raises ValueError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- return _long_type(attr) # type: ignore
-
- @staticmethod
- def deserialize_duration(attr):
- """Deserialize ISO-8601 formatted string into TimeDelta object.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized duration
- :rtype: TimeDelta
- :raises DeserializationError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- try:
- duration = isodate.parse_duration(attr)
- except (ValueError, OverflowError, AttributeError) as err:
- msg = "Cannot deserialize duration object."
- raise DeserializationError(msg) from err
- return duration
-
- @staticmethod
- def deserialize_date(attr):
- """Deserialize ISO-8601 formatted string into Date object.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized date
- :rtype: Date
- :raises DeserializationError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
- raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
- # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
- return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
-
- @staticmethod
- def deserialize_time(attr):
- """Deserialize ISO-8601 formatted string into time object.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized time
- :rtype: datetime.time
- :raises DeserializationError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
- raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
- return isodate.parse_time(attr)
-
- @staticmethod
- def deserialize_rfc(attr):
- """Deserialize RFC-1123 formatted string into Datetime object.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized RFC datetime
- :rtype: Datetime
- :raises DeserializationError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- try:
- parsed_date = email.utils.parsedate_tz(attr) # type: ignore
- date_obj = datetime.datetime(
- *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
- )
- if not date_obj.tzinfo:
- date_obj = date_obj.astimezone(tz=TZ_UTC)
- except ValueError as err:
- msg = "Cannot deserialize to rfc datetime object."
- raise DeserializationError(msg) from err
- return date_obj
-
- @staticmethod
- def deserialize_iso(attr):
- """Deserialize ISO-8601 formatted string into Datetime object.
-
- :param str attr: response string to be deserialized.
- :return: Deserialized ISO datetime
- :rtype: Datetime
- :raises DeserializationError: if string format invalid.
- """
- if isinstance(attr, ET.Element):
- attr = attr.text
- try:
- attr = attr.upper() # type: ignore
- match = Deserializer.valid_date.match(attr)
- if not match:
- raise ValueError("Invalid datetime string: " + attr)
-
- check_decimal = attr.split(".")
- if len(check_decimal) > 1:
- decimal_str = ""
- for digit in check_decimal[1]:
- if digit.isdigit():
- decimal_str += digit
- else:
- break
- if len(decimal_str) > 6:
- attr = attr.replace(decimal_str, decimal_str[0:6])
-
- date_obj = isodate.parse_datetime(attr)
- test_utc = date_obj.utctimetuple()
- if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
- raise OverflowError("Hit max or min date")
- except (ValueError, OverflowError, AttributeError) as err:
- msg = "Cannot deserialize datetime object."
- raise DeserializationError(msg) from err
- return date_obj
-
- @staticmethod
- def deserialize_unix(attr):
- """Serialize Datetime object into IntTime format.
- This is represented as seconds.
-
- :param int attr: Object to be serialized.
- :return: Deserialized datetime
- :rtype: Datetime
- :raises DeserializationError: if format invalid
- """
- if isinstance(attr, ET.Element):
- attr = int(attr.text) # type: ignore
- try:
- attr = int(attr)
- date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
- except ValueError as err:
- msg = "Cannot deserialize to unix datetime object."
- raise DeserializationError(msg) from err
- return date_obj
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_utils/model_base.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_utils/model_base.py
index 49d5c7259389..b4433021b4e5 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_utils/model_base.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_utils/model_base.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines
+# pylint: disable=line-too-long,useless-suppression,too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -29,6 +29,7 @@
from azure.core import CaseInsensitiveEnumMeta
from azure.core.pipeline import PipelineResponse
from azure.core.serialization import _Null
+from azure.core.rest import HttpResponse
_LOGGER = logging.getLogger(__name__)
@@ -36,6 +37,7 @@
TZ_UTC = timezone.utc
_T = typing.TypeVar("_T")
+_NONE_TYPE = type(None)
def _timedelta_as_isostr(td: timedelta) -> str:
@@ -170,6 +172,21 @@ def default(self, o): # pylint: disable=too-many-return-statements
r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT"
)
+_ARRAY_ENCODE_MAPPING = {
+ "pipeDelimited": "|",
+ "spaceDelimited": " ",
+ "commaDelimited": ",",
+ "newlineDelimited": "\n",
+}
+
+
+def _deserialize_array_encoded(delimit: str, attr):
+ if isinstance(attr, str):
+ if attr == "":
+ return []
+ return attr.split(delimit)
+ return attr
+
def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
"""Deserialize ISO-8601 formatted string into Datetime object.
@@ -201,7 +218,7 @@ def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
test_utc = date_obj.utctimetuple()
if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
raise OverflowError("Hit max or min date")
- return date_obj
+ return date_obj # type: ignore[no-any-return]
def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime:
@@ -255,7 +272,7 @@ def _deserialize_time(attr: typing.Union[str, time]) -> time:
"""
if isinstance(attr, time):
return attr
- return isodate.parse_time(attr)
+ return isodate.parse_time(attr) # type: ignore[no-any-return]
def _deserialize_bytes(attr):
@@ -314,6 +331,8 @@ def _deserialize_int_as_str(attr):
def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None):
if annotation is int and rf and rf._format == "str":
return _deserialize_int_as_str
+ if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING:
+ return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format])
if rf and rf._format:
return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format)
return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore
@@ -345,16 +364,46 @@ def _get_model(module_name: str, model_name: str):
class _MyMutableMapping(MutableMapping[str, typing.Any]):
- def __init__(self, data: typing.Dict[str, typing.Any]) -> None:
+ def __init__(self, data: dict[str, typing.Any]) -> None:
self._data = data
def __contains__(self, key: typing.Any) -> bool:
return key in self._data
def __getitem__(self, key: str) -> typing.Any:
+ # If this key has been deserialized (for mutable types), we need to handle serialization
+ if hasattr(self, "_attr_to_rest_field"):
+ cache_attr = f"_deserialized_{key}"
+ if hasattr(self, cache_attr):
+ rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key)
+ if rf:
+ value = self._data.get(key)
+ if isinstance(value, (dict, list, set)):
+ # For mutable types, serialize and return
+ # But also update _data with serialized form and clear flag
+ # so mutations via this returned value affect _data
+ serialized = _serialize(value, rf._format)
+ # If serialized form is same type (no transformation needed),
+ # return _data directly so mutations work
+ if isinstance(serialized, type(value)) and serialized == value:
+ return self._data.get(key)
+ # Otherwise return serialized copy and clear flag
+ try:
+ object.__delattr__(self, cache_attr)
+ except AttributeError:
+ pass
+ # Store serialized form back
+ self._data[key] = serialized
+ return serialized
return self._data.__getitem__(key)
def __setitem__(self, key: str, value: typing.Any) -> None:
+ # Clear any cached deserialized value when setting through dictionary access
+ cache_attr = f"_deserialized_{key}"
+ try:
+ object.__delattr__(self, cache_attr)
+ except AttributeError:
+ pass
self._data.__setitem__(key, value)
def __delitem__(self, key: str) -> None:
@@ -425,7 +474,7 @@ def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
return self._data.pop(key)
return self._data.pop(key, default)
- def popitem(self) -> typing.Tuple[str, typing.Any]:
+ def popitem(self) -> tuple[str, typing.Any]:
"""
Removes and returns some (key, value) pair
:returns: The (key, value) pair.
@@ -466,6 +515,8 @@ def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
return self._data.setdefault(key, default)
def __eq__(self, other: typing.Any) -> bool:
+ if isinstance(other, _MyMutableMapping):
+ return self._data == other._data
try:
other_model = self.__class__(other)
except Exception:
@@ -482,6 +533,8 @@ def _is_model(obj: typing.Any) -> bool:
def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements
if isinstance(o, list):
+ if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o):
+ return _ARRAY_ENCODE_MAPPING[format].join(o)
return [_serialize(x, format) for x in o]
if isinstance(o, dict):
return {k: _serialize(v, format) for k, v in o.items()}
@@ -513,9 +566,7 @@ def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-m
return o
-def _get_rest_field(
- attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str
-) -> typing.Optional["_RestField"]:
+def _get_rest_field(attr_to_rest_field: dict[str, "_RestField"], rest_name: str) -> typing.Optional["_RestField"]:
try:
return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name)
except StopIteration:
@@ -538,7 +589,7 @@ class Model(_MyMutableMapping):
_is_model = True
# label whether current class's _attr_to_rest_field has been calculated
# could not see _attr_to_rest_field directly because subclass inherits it from parent class
- _calculated: typing.Set[str] = set()
+ _calculated: set[str] = set()
def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None:
class_name = self.__class__.__name__
@@ -623,7 +674,7 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self:
# we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping',
# 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object'
mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order
- attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property
+ attr_to_rest_field: dict[str, _RestField] = { # map attribute name to rest_field property
k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type")
}
annotations = {
@@ -638,7 +689,7 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self:
rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None))
if not rf._rest_name_input:
rf._rest_name_input = attr
- cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items())
+ cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items())
cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}")
return super().__new__(cls)
@@ -680,7 +731,7 @@ def _deserialize(cls, data, exist_discriminators):
mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member
return mapped_cls._deserialize(data, exist_discriminators)
- def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]:
+ def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]:
"""Return a dict that can be turned into json using json.dump.
:keyword bool exclude_readonly: Whether to remove the readonly properties.
@@ -740,7 +791,7 @@ def _deserialize_with_union(deserializers, obj):
def _deserialize_dict(
value_deserializer: typing.Optional[typing.Callable],
module: typing.Optional[str],
- obj: typing.Dict[typing.Any, typing.Any],
+ obj: dict[typing.Any, typing.Any],
):
if obj is None:
return obj
@@ -750,7 +801,7 @@ def _deserialize_dict(
def _deserialize_multiple_sequence(
- entry_deserializers: typing.List[typing.Optional[typing.Callable]],
+ entry_deserializers: list[typing.Optional[typing.Callable]],
module: typing.Optional[str],
obj,
):
@@ -759,6 +810,14 @@ def _deserialize_multiple_sequence(
return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers))
+def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool:
+ return (
+ isinstance(deserializer, functools.partial)
+ and isinstance(deserializer.args[0], functools.partial)
+ and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable
+ )
+
+
def _deserialize_sequence(
deserializer: typing.Optional[typing.Callable],
module: typing.Optional[str],
@@ -768,17 +827,30 @@ def _deserialize_sequence(
return obj
if isinstance(obj, ET.Element):
obj = list(obj)
+
+ # encoded string may be deserialized to sequence
+ if isinstance(obj, str) and isinstance(deserializer, functools.partial):
+ # for list[str]
+ if _is_array_encoded_deserializer(deserializer):
+ return deserializer(obj)
+
+ # for list[Union[...]]
+ if isinstance(deserializer.args[0], list):
+ for sub_deserializer in deserializer.args[0]:
+ if _is_array_encoded_deserializer(sub_deserializer):
+ return sub_deserializer(obj)
+
return type(obj)(_deserialize(deserializer, entry, module) for entry in obj)
-def _sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.Any]:
+def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]:
return sorted(
types,
key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"),
)
-def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches
+def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches
annotation: typing.Any,
module: typing.Optional[str],
rf: typing.Optional["_RestField"] = None,
@@ -818,16 +890,16 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur
# is it optional?
try:
- if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore
+ if any(a is _NONE_TYPE for a in annotation.__args__): # pyright: ignore
if len(annotation.__args__) <= 2: # pyright: ignore
if_obj_deserializer = _get_deserialize_callable_from_annotation(
- next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore
+ next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf # pyright: ignore
)
return functools.partial(_deserialize_with_optional, if_obj_deserializer)
# the type is Optional[Union[...]], we need to remove the None type from the Union
annotation_copy = copy.copy(annotation)
- annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore
+ annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE] # pyright: ignore
return _get_deserialize_callable_from_annotation(annotation_copy, module, rf)
except AttributeError:
pass
@@ -843,7 +915,10 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur
return functools.partial(_deserialize_with_union, deserializers)
try:
- if annotation._name == "Dict": # pyright: ignore
+ annotation_name = (
+ annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore
+ )
+ if annotation_name.lower() == "dict":
value_deserializer = _get_deserialize_callable_from_annotation(
annotation.__args__[1], module, rf # pyright: ignore
)
@@ -856,7 +931,10 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur
except (AttributeError, IndexError):
pass
try:
- if annotation._name in ["List", "Set", "Tuple", "Sequence"]: # pyright: ignore
+ annotation_name = (
+ annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore
+ )
+ if annotation_name.lower() in ["list", "set", "tuple", "sequence"]:
if len(annotation.__args__) > 1: # pyright: ignore
entry_deserializers = [
_get_deserialize_callable_from_annotation(dt, module, rf)
@@ -905,16 +983,20 @@ def _deserialize_with_callable(
return float(value.text) if value.text else None
if deserializer is bool:
return value.text == "true" if value.text else None
+ if deserializer and deserializer in _DESERIALIZE_MAPPING.values():
+ return deserializer(value.text) if value.text else None
+ if deserializer and deserializer in _DESERIALIZE_MAPPING_WITHFORMAT.values():
+ return deserializer(value.text) if value.text else None
if deserializer is None:
return value
if deserializer in [int, float, bool]:
return deserializer(value)
if isinstance(deserializer, CaseInsensitiveEnumMeta):
try:
- return deserializer(value)
+ return deserializer(value.text if isinstance(value, ET.Element) else value)
except ValueError:
# for unknown value, return raw value
- return value
+ return value.text if isinstance(value, ET.Element) else value
if isinstance(deserializer, type) and issubclass(deserializer, Model):
return deserializer._deserialize(value, [])
return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value)
@@ -940,14 +1022,14 @@ def _deserialize(
def _failsafe_deserialize(
deserializer: typing.Any,
- value: typing.Any,
+ response: HttpResponse,
module: typing.Optional[str] = None,
rf: typing.Optional["_RestField"] = None,
format: typing.Optional[str] = None,
) -> typing.Any:
try:
- return _deserialize(deserializer, value, module, rf, format)
- except DeserializationError:
+ return _deserialize(deserializer, response.json(), module, rf, format)
+ except Exception: # pylint: disable=broad-except
_LOGGER.warning(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -956,11 +1038,11 @@ def _failsafe_deserialize(
def _failsafe_deserialize_xml(
deserializer: typing.Any,
- value: typing.Any,
+ response: HttpResponse,
) -> typing.Any:
try:
- return _deserialize_xml(deserializer, value)
- except DeserializationError:
+ return _deserialize_xml(deserializer, response.text())
+ except Exception: # pylint: disable=broad-except
_LOGGER.warning(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -974,11 +1056,11 @@ def __init__(
name: typing.Optional[str] = None,
type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin
is_discriminator: bool = False,
- visibility: typing.Optional[typing.List[str]] = None,
+ visibility: typing.Optional[list[str]] = None,
default: typing.Any = _UNSET,
format: typing.Optional[str] = None,
is_multipart_file_input: bool = False,
- xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
+ xml: typing.Optional[dict[str, typing.Any]] = None,
):
self._type = type
self._rest_name_input = name
@@ -993,7 +1075,11 @@ def __init__(
@property
def _class_type(self) -> typing.Any:
- return getattr(self._type, "args", [None])[0]
+ result = getattr(self._type, "args", [None])[0]
+ # type may be wrapped by nested functools.partial so we need to check for that
+ if isinstance(result, functools.partial):
+ return getattr(result, "args", [None])[0]
+ return result
@property
def _rest_name(self) -> str:
@@ -1004,14 +1090,37 @@ def _rest_name(self) -> str:
def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin
# by this point, type and rest_name will have a value bc we default
# them in __new__ of the Model class
- item = obj.get(self._rest_name)
+ # Use _data.get() directly to avoid triggering __getitem__ which clears the cache
+ item = obj._data.get(self._rest_name)
if item is None:
return item
if self._is_model:
return item
- return _deserialize(self._type, _serialize(item, self._format), rf=self)
+
+ # For mutable types, we want mutations to directly affect _data
+ # Check if we've already deserialized this value
+ cache_attr = f"_deserialized_{self._rest_name}"
+ if hasattr(obj, cache_attr):
+ # Return the value from _data directly (it's been deserialized in place)
+ return obj._data.get(self._rest_name)
+
+ deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self)
+
+ # For mutable types, store the deserialized value back in _data
+ # so mutations directly affect _data
+ if isinstance(deserialized, (dict, list, set)):
+ obj._data[self._rest_name] = deserialized
+ object.__setattr__(obj, cache_attr, True) # Mark as deserialized
+ return deserialized
+
+ return deserialized
def __set__(self, obj: Model, value) -> None:
+ # Clear the cached deserialized object when setting a new value
+ cache_attr = f"_deserialized_{self._rest_name}"
+ if hasattr(obj, cache_attr):
+ object.__delattr__(obj, cache_attr)
+
if value is None:
# we want to wipe out entries if users set attr to None
try:
@@ -1036,11 +1145,11 @@ def rest_field(
*,
name: typing.Optional[str] = None,
type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin
- visibility: typing.Optional[typing.List[str]] = None,
+ visibility: typing.Optional[list[str]] = None,
default: typing.Any = _UNSET,
format: typing.Optional[str] = None,
is_multipart_file_input: bool = False,
- xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
+ xml: typing.Optional[dict[str, typing.Any]] = None,
) -> typing.Any:
return _RestField(
name=name,
@@ -1057,8 +1166,8 @@ def rest_discriminator(
*,
name: typing.Optional[str] = None,
type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin
- visibility: typing.Optional[typing.List[str]] = None,
- xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
+ visibility: typing.Optional[list[str]] = None,
+ xml: typing.Optional[dict[str, typing.Any]] = None,
) -> typing.Any:
return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml)
@@ -1077,9 +1186,9 @@ def serialize_xml(model: Model, exclude_readonly: bool = False) -> str:
def _get_element(
o: typing.Any,
exclude_readonly: bool = False,
- parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None,
+ parent_meta: typing.Optional[dict[str, typing.Any]] = None,
wrapped_element: typing.Optional[ET.Element] = None,
-) -> typing.Union[ET.Element, typing.List[ET.Element]]:
+) -> typing.Union[ET.Element, list[ET.Element]]:
if _is_model(o):
model_meta = getattr(o, "_xml", {})
@@ -1168,7 +1277,7 @@ def _get_element(
def _get_wrapped_element(
v: typing.Any,
exclude_readonly: bool,
- meta: typing.Optional[typing.Dict[str, typing.Any]],
+ meta: typing.Optional[dict[str, typing.Any]],
) -> ET.Element:
wrapped_element = _create_xml_element(
meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None
@@ -1179,7 +1288,7 @@ def _get_wrapped_element(
_get_element(v, exclude_readonly, meta, wrapped_element)
else:
wrapped_element.text = _get_primitive_type_value(v)
- return wrapped_element
+ return wrapped_element # type: ignore[no-any-return]
def _get_primitive_type_value(v) -> str:
@@ -1192,7 +1301,9 @@ def _get_primitive_type_value(v) -> str:
return str(v)
-def _create_xml_element(tag, prefix=None, ns=None):
+def _create_xml_element(
+ tag: typing.Any, prefix: typing.Optional[str] = None, ns: typing.Optional[str] = None
+) -> ET.Element:
if prefix and ns:
ET.register_namespace(prefix, ns)
if ns:
@@ -1211,7 +1322,7 @@ def _deserialize_xml(
def _convert_element(e: ET.Element):
# dict case
if len(e.attrib) > 0 or len({child.tag for child in e}) > 1:
- dict_result: typing.Dict[str, typing.Any] = {}
+ dict_result: dict[str, typing.Any] = {}
for child in e:
if dict_result.get(child.tag) is not None:
if isinstance(dict_result[child.tag], list):
@@ -1224,7 +1335,7 @@ def _convert_element(e: ET.Element):
return dict_result
# array case
if len(e) > 0:
- array_result: typing.List[typing.Any] = []
+ array_result: list[typing.Any] = []
for child in e:
array_result.append(_convert_element(child))
return array_result
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_utils/serialization.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_utils/serialization.py
index eb86ea23c965..81ec1de5922b 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_utils/serialization.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_utils/serialization.py
@@ -21,7 +21,6 @@
import sys
import codecs
from typing import (
- Dict,
Any,
cast,
Optional,
@@ -31,7 +30,6 @@
Mapping,
Callable,
MutableMapping,
- List,
)
try:
@@ -229,12 +227,12 @@ class Model:
serialization and deserialization.
"""
- _subtype_map: Dict[str, Dict[str, Any]] = {}
- _attribute_map: Dict[str, Dict[str, Any]] = {}
- _validation: Dict[str, Dict[str, Any]] = {}
+ _subtype_map: dict[str, dict[str, Any]] = {}
+ _attribute_map: dict[str, dict[str, Any]] = {}
+ _validation: dict[str, dict[str, Any]] = {}
def __init__(self, **kwargs: Any) -> None:
- self.additional_properties: Optional[Dict[str, Any]] = {}
+ self.additional_properties: Optional[dict[str, Any]] = {}
for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
@@ -311,7 +309,7 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
def as_dict(
self,
keep_readonly: bool = True,
- key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer,
+ key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer,
**kwargs: Any
) -> JSON:
"""Return a dict that can be serialized using json.dump.
@@ -380,7 +378,7 @@ def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self:
def from_dict(
cls,
data: Any,
- key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None,
+ key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None,
content_type: Optional[str] = None,
) -> Self:
"""Parse a dict using given key extractor return a model.
@@ -414,7 +412,7 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
+ result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access
return result
@classmethod
@@ -528,7 +526,7 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
"[]": self.serialize_iter,
"{}": self.serialize_dict,
}
- self.dependencies: Dict[str, type] = dict(classes) if classes else {}
+ self.dependencies: dict[str, type] = dict(classes) if classes else {}
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
@@ -579,7 +577,7 @@ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, to
if attr_name == "additional_properties" and attr_desc["key"] == "":
if target_obj.additional_properties is not None:
- serialized.update(target_obj.additional_properties)
+ serialized |= target_obj.additional_properties
continue
try:
@@ -789,7 +787,7 @@ def serialize_data(self, data, data_type, **kwargs):
# If dependencies is empty, try with current data class
# It has to be a subclass of Enum anyway
- enum_type = self.dependencies.get(data_type, data.__class__)
+ enum_type = self.dependencies.get(data_type, cast(type, data.__class__))
if issubclass(enum_type, Enum):
return Serializer.serialize_enum(data, enum_obj=enum_type)
@@ -823,13 +821,20 @@ def serialize_basic(cls, data, data_type, **kwargs):
:param str data_type: Type of object in the iterable.
:rtype: str, int, float, bool
:return: serialized object
+ :raises TypeError: if data_type is not one of str, int, float, bool.
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec # pylint: disable=eval-used
+ if data_type == "int":
+ return int(data)
+ if data_type == "float":
+ return float(data)
+ if data_type == "bool":
+ return bool(data)
+ raise TypeError("Unknown basic data type: {}".format(data_type))
@classmethod
def serialize_unicode(cls, data):
@@ -1184,7 +1189,7 @@ def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argumen
while "." in key:
# Need the cast, as for some reasons "split" is typed as list[str | Any]
- dict_keys = cast(List[str], _FLATTEN.split(key))
+ dict_keys = cast(list[str], _FLATTEN.split(key))
if len(dict_keys) == 1:
key = _decode_attribute_map_key(dict_keys[0])
break
@@ -1386,7 +1391,7 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
"duration": (isodate.Duration, datetime.timedelta),
"iso-8601": (datetime.datetime),
}
- self.dependencies: Dict[str, type] = dict(classes) if classes else {}
+ self.dependencies: dict[str, type] = dict(classes) if classes else {}
self.key_extractors = [rest_key_extractor, xml_key_extractor]
# Additional properties only works if the "rest_key_extractor" is used to
# extract the keys. Making it to work whatever the key extractor is too much
@@ -1759,7 +1764,7 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return
:param str data_type: deserialization data type.
:return: Deserialized basic type.
:rtype: str, int, float or bool
- :raises TypeError: if string format is not valid.
+ :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool.
"""
# If we're here, data is supposed to be a basic type.
# If it's still an XML node, take the text
@@ -1785,7 +1790,11 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec # pylint: disable=eval-used
+ if data_type == "int":
+ return int(attr)
+ if data_type == "float":
+ return float(attr)
+ raise TypeError("Unknown basic data type: {}".format(data_type))
@staticmethod
def deserialize_unicode(data):
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_validation.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_validation.py
new file mode 100644
index 000000000000..f5af3a4eb8a2
--- /dev/null
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_validation.py
@@ -0,0 +1,66 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+
+
+def api_version_validation(**kwargs):
+ params_added_on = kwargs.pop("params_added_on", {})
+ method_added_on = kwargs.pop("method_added_on", "")
+ api_versions_list = kwargs.pop("api_versions_list", [])
+
+ def _index_with_default(value: str, default: int = -1) -> int:
+ """Get the index of value in lst, or return default if not found.
+
+ :param value: The value to search for in the api_versions_list.
+ :type value: str
+ :param default: The default value to return if the value is not found.
+ :type default: int
+ :return: The index of the value in the list, or the default value if not found.
+ :rtype: int
+ """
+ try:
+ return api_versions_list.index(value)
+ except ValueError:
+ return default
+
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ try:
+ # this assumes the client has an _api_version attribute
+ client = args[0]
+ client_api_version = client._config.api_version # pylint: disable=protected-access
+ except AttributeError:
+ return func(*args, **kwargs)
+
+ if _index_with_default(method_added_on) > _index_with_default(client_api_version):
+ raise ValueError(
+ f"'{func.__name__}' is not available in API version "
+ f"{client_api_version}. Pass service API version {method_added_on} or newer to your client."
+ )
+
+ unsupported = {
+ parameter: api_version
+ for api_version, parameters in params_added_on.items()
+ for parameter in parameters
+ if parameter in kwargs and _index_with_default(api_version) > _index_with_default(client_api_version)
+ }
+ if unsupported:
+ raise ValueError(
+ "".join(
+ [
+ f"'{param}' is not available in API version {client_api_version}. "
+ f"Use service API version {version} or newer.\n"
+ for param, version in unsupported.items()
+ ]
+ )
+ )
+ return func(*args, **kwargs)
+
+ return wrapper
+
+ return decorator
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_vendor.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_vendor.py
deleted file mode 100644
index 3790083b97e3..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_vendor.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) Python Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from abc import ABC
-from typing import TYPE_CHECKING
-
-from ._configuration import KeyVaultClientConfiguration
-
-if TYPE_CHECKING:
- from azure.core import PipelineClient
-
- from ._serialization import Deserializer, Serializer
-
-
-class KeyVaultClientMixinABC(ABC):
- """DO NOT use this class. It is for internal typing use only."""
-
- _client: "PipelineClient"
- _config: KeyVaultClientConfiguration
- _serialize: "Serializer"
- _deserialize: "Deserializer"
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_version.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_version.py
index 0d777283b3a2..2e2a864184cb 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_version.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_version.py
@@ -6,4 +6,4 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-VERSION = "4.9.0b1"
+VERSION = "4.11.0"
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_client.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_client.py
index 3f8e48a8e50c..8229906ff46f 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_client.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_client.py
@@ -16,13 +16,13 @@
from .._utils.serialization import Deserializer, Serializer
from ._configuration import KeyVaultClientConfiguration
-from ._operations import KeyVaultClientOperationsMixin
+from ._operations import _KeyVaultClientOperationsMixin
if TYPE_CHECKING:
from azure.core.credentials_async import AsyncTokenCredential
-class KeyVaultClient(KeyVaultClientOperationsMixin):
+class KeyVaultClient(_KeyVaultClientOperationsMixin):
"""The key vault client performs cryptographic key operations and vault operations against the Key
Vault service.
@@ -30,8 +30,9 @@ class KeyVaultClient(KeyVaultClientOperationsMixin):
:type vault_base_url: str
:param credential: Credential used to authenticate requests to the service. Required.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
- :keyword api_version: The API version to use for this operation. Default value is "7.6". Note
- that overriding this default value may result in unsupported behavior.
+ :keyword api_version: The API version to use for this operation. Known values are "2025-07-01"
+ and None. Default value is "2025-07-01". Note that overriding this default value may result in
+ unsupported behavior.
:paramtype api_version: str
"""
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_configuration.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_configuration.py
index 6c360c330a68..e7312192fcae 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_configuration.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_configuration.py
@@ -26,13 +26,14 @@ class KeyVaultClientConfiguration: # pylint: disable=too-many-instance-attribut
:type vault_base_url: str
:param credential: Credential used to authenticate requests to the service. Required.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
- :keyword api_version: The API version to use for this operation. Default value is "7.6". Note
- that overriding this default value may result in unsupported behavior.
+ :keyword api_version: The API version to use for this operation. Known values are "2025-07-01"
+ and None. Default value is "2025-07-01". Note that overriding this default value may result in
+ unsupported behavior.
:paramtype api_version: str
"""
def __init__(self, vault_base_url: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None:
- api_version: str = kwargs.pop("api_version", "7.6")
+ api_version: str = kwargs.pop("api_version", "2025-07-01")
if vault_base_url is None:
raise ValueError("Parameter 'vault_base_url' must not be None.")
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/__init__.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/__init__.py
index d514f5e4b5be..79e1a2ccf3da 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/__init__.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/__init__.py
@@ -12,14 +12,12 @@
if TYPE_CHECKING:
from ._patch import * # pylint: disable=unused-wildcard-import
-from ._operations import KeyVaultClientOperationsMixin # type: ignore
+from ._operations import _KeyVaultClientOperationsMixin # type: ignore # pylint: disable=unused-import
from ._patch import __all__ as _patch_all
from ._patch import *
from ._patch import patch_sdk as _patch_sdk
-__all__ = [
- "KeyVaultClientOperationsMixin",
-]
+__all__ = []
__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/_operations.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/_operations.py
index 4e90fe51dc50..6a157578fc7f 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/_operations.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/_operations.py
@@ -9,7 +9,7 @@
from collections.abc import MutableMapping
from io import IOBase
import json
-from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TypeVar, Union, overload
+from typing import Any, Callable, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core import AsyncPipelineClient
@@ -47,14 +47,15 @@
)
from ..._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize
from ..._utils.utils import ClientMixinABC
+from ..._validation import api_version_validation
from .._configuration import KeyVaultClientConfiguration
JSON = MutableMapping[str, Any]
T = TypeVar("T")
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]]
-class KeyVaultClientOperationsMixin(
+class _KeyVaultClientOperationsMixin(
ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], KeyVaultClientConfiguration]
):
@@ -193,6 +194,7 @@ async def set_secret(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -207,11 +209,14 @@ async def set_secret(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.SecretBundle, response.json())
@@ -259,6 +264,7 @@ async def delete_secret(self, secret_name: str, **kwargs: Any) -> _models.Delete
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -273,11 +279,14 @@ async def delete_secret(self, secret_name: str, **kwargs: Any) -> _models.Delete
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.DeletedSecretBundle, response.json())
@@ -439,6 +448,7 @@ async def update_secret(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -453,11 +463,14 @@ async def update_secret(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.SecretBundle, response.json())
@@ -467,7 +480,18 @@ async def update_secret(
return deserialized # type: ignore
@distributed_trace_async
- async def get_secret(self, secret_name: str, secret_version: str, **kwargs: Any) -> _models.SecretBundle:
+ @api_version_validation(
+ params_added_on={"2025-06-01-preview": ["out_content_type"]},
+ api_versions_list=["7.5", "7.6-preview.2", "7.6", "2025-06-01-preview", "2025-07-01"],
+ )
+ async def get_secret(
+ self,
+ secret_name: str,
+ secret_version: str,
+ *,
+ out_content_type: Optional[Union[str, _models.ContentType]] = None,
+ **kwargs: Any
+ ) -> _models.SecretBundle:
"""Get a specified secret from a given key vault.
The GET operation is applicable to any secret stored in Azure Key Vault. This operation
@@ -478,6 +502,12 @@ async def get_secret(self, secret_name: str, secret_version: str, **kwargs: Any)
:param secret_version: The version of the secret. This URI fragment is optional. If not
specified, the latest version of the secret is returned. Required.
:type secret_version: str
+ :keyword out_content_type: The media type (MIME type) of the certificate. If a supported format
+ is specified, the certificate content is converted to the requested format. Currently, only PFX
+ to PEM conversion is supported. If an unsupported format is specified, the request is rejected.
+ If not specified, the certificate is returned in its original format without conversion. Known
+ values are: "application/x-pkcs12" and "application/x-pem-file". Default value is None.
+ :paramtype out_content_type: str or ~azure.keyvault.secrets._generated.models.ContentType
:return: SecretBundle. The SecretBundle is compatible with MutableMapping
:rtype: ~azure.keyvault.secrets._generated.models.SecretBundle
:raises ~azure.core.exceptions.HttpResponseError:
@@ -498,6 +528,7 @@ async def get_secret(self, secret_name: str, secret_version: str, **kwargs: Any)
_request = build_key_vault_get_secret_request(
secret_name=secret_name,
secret_version=secret_version,
+ out_content_type=out_content_type,
api_version=self._config.api_version,
headers=_headers,
params=_params,
@@ -509,6 +540,7 @@ async def get_secret(self, secret_name: str, secret_version: str, **kwargs: Any)
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -523,11 +555,14 @@ async def get_secret(self, secret_name: str, secret_version: str, **kwargs: Any)
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.SecretBundle, response.json())
@@ -537,7 +572,7 @@ async def get_secret(self, secret_name: str, secret_version: str, **kwargs: Any)
return deserialized # type: ignore
@distributed_trace
- def get_secrets(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> AsyncIterable["_models.SecretItem"]:
+ def get_secrets(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> AsyncItemPaged["_models.SecretItem"]:
"""List secrets in a specified key vault.
The Get Secrets operation is applicable to the entire vault. However, only the base secret
@@ -555,7 +590,7 @@ def get_secrets(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> Asy
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
- cls: ClsType[List[_models.SecretItem]] = kwargs.pop("cls", None)
+ cls: ClsType[list[_models.SecretItem]] = kwargs.pop("cls", None)
error_map: MutableMapping = {
401: ClientAuthenticationError,
@@ -605,7 +640,10 @@ def prepare_request(next_link=None):
async def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.SecretItem], deserialized.get("value", []))
+ list_of_elem = _deserialize(
+ list[_models.SecretItem],
+ deserialized.get("value", []),
+ )
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.get("nextLink") or None, AsyncList(list_of_elem)
@@ -621,7 +659,10 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
return pipeline_response
@@ -631,7 +672,7 @@ async def get_next(next_link=None):
@distributed_trace
def get_secret_versions(
self, secret_name: str, *, maxresults: Optional[int] = None, **kwargs: Any
- ) -> AsyncIterable["_models.SecretItem"]:
+ ) -> AsyncItemPaged["_models.SecretItem"]:
"""List all versions of the specified secret.
The full secret identifier and attributes are provided in the response. No values are returned
@@ -650,7 +691,7 @@ def get_secret_versions(
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
- cls: ClsType[List[_models.SecretItem]] = kwargs.pop("cls", None)
+ cls: ClsType[list[_models.SecretItem]] = kwargs.pop("cls", None)
error_map: MutableMapping = {
401: ClientAuthenticationError,
@@ -701,7 +742,10 @@ def prepare_request(next_link=None):
async def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.SecretItem], deserialized.get("value", []))
+ list_of_elem = _deserialize(
+ list[_models.SecretItem],
+ deserialized.get("value", []),
+ )
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.get("nextLink") or None, AsyncList(list_of_elem)
@@ -717,7 +761,10 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
return pipeline_response
@@ -727,7 +774,7 @@ async def get_next(next_link=None):
@distributed_trace
def get_deleted_secrets(
self, *, maxresults: Optional[int] = None, **kwargs: Any
- ) -> AsyncIterable["_models.DeletedSecretItem"]:
+ ) -> AsyncItemPaged["_models.DeletedSecretItem"]:
"""Lists deleted secrets for the specified vault.
The Get Deleted Secrets operation returns the secrets that have been deleted for a vault
@@ -744,7 +791,7 @@ def get_deleted_secrets(
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
- cls: ClsType[List[_models.DeletedSecretItem]] = kwargs.pop("cls", None)
+ cls: ClsType[list[_models.DeletedSecretItem]] = kwargs.pop("cls", None)
error_map: MutableMapping = {
401: ClientAuthenticationError,
@@ -794,7 +841,10 @@ def prepare_request(next_link=None):
async def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
- list_of_elem = _deserialize(List[_models.DeletedSecretItem], deserialized.get("value", []))
+ list_of_elem = _deserialize(
+ list[_models.DeletedSecretItem],
+ deserialized.get("value", []),
+ )
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.get("nextLink") or None, AsyncList(list_of_elem)
@@ -810,7 +860,10 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
return pipeline_response
@@ -856,6 +909,7 @@ async def get_deleted_secret(self, secret_name: str, **kwargs: Any) -> _models.D
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -870,11 +924,14 @@ async def get_deleted_secret(self, secret_name: str, **kwargs: Any) -> _models.D
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.DeletedSecretBundle, response.json())
@@ -932,7 +989,10 @@ async def purge_deleted_secret(self, secret_name: str, **kwargs: Any) -> None:
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if cls:
@@ -977,6 +1037,7 @@ async def recover_deleted_secret(self, secret_name: str, **kwargs: Any) -> _mode
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -991,11 +1052,14 @@ async def recover_deleted_secret(self, secret_name: str, **kwargs: Any) -> _mode
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.SecretBundle, response.json())
@@ -1043,6 +1107,7 @@ async def backup_secret(self, secret_name: str, **kwargs: Any) -> _models.Backup
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1057,11 +1122,14 @@ async def backup_secret(self, secret_name: str, **kwargs: Any) -> _models.Backup
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.BackupSecretResult, response.json())
@@ -1179,6 +1247,7 @@ async def restore_secret(
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
+ _decompress = kwargs.pop("decompress", True)
_stream = kwargs.pop("stream", False)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
_request, stream=_stream, **kwargs
@@ -1193,11 +1262,14 @@ async def restore_secret(
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = _failsafe_deserialize(_models.KeyVaultError, response.json())
+ error = _failsafe_deserialize(
+ _models.KeyVaultError,
+ response,
+ )
raise HttpResponseError(response=response, model=error)
if _stream:
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes() if _decompress else response.iter_raw()
else:
deserialized = _deserialize(_models.SecretBundle, response.json())
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/_patch.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/_patch.py
index f7dd32510333..87676c65a8f0 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/_patch.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/_patch.py
@@ -1,14 +1,15 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
"""Customize generated code here.
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
-from typing import List
-__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+__all__: list[str] = [] # Add all objects you want publicly available to users at this package level
def patch_sdk():
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_patch.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_patch.py
index f7dd32510333..87676c65a8f0 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_patch.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_patch.py
@@ -1,14 +1,15 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
"""Customize generated code here.
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
-from typing import List
-__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+__all__: list[str] = [] # Add all objects you want publicly available to users at this package level
def patch_sdk():
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_vendor.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_vendor.py
deleted file mode 100644
index 2b1f525d61ea..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_vendor.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) Python Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from abc import ABC
-from typing import TYPE_CHECKING
-
-from ._configuration import KeyVaultClientConfiguration
-
-if TYPE_CHECKING:
- from azure.core import AsyncPipelineClient
-
- from .._serialization import Deserializer, Serializer
-
-
-class KeyVaultClientMixinABC(ABC):
- """DO NOT use this class. It is for internal typing use only."""
-
- _client: "AsyncPipelineClient"
- _config: KeyVaultClientConfiguration
- _serialize: "Serializer"
- _deserialize: "Deserializer"
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/__init__.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/__init__.py
index 5a383fd12320..0ad5770b2bff 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/__init__.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/__init__.py
@@ -28,6 +28,7 @@
)
from ._enums import ( # type: ignore
+ ContentType,
DeletionRecoveryLevel,
)
from ._patch import __all__ as _patch_all
@@ -46,6 +47,7 @@
"SecretRestoreParameters",
"SecretSetParameters",
"SecretUpdateParameters",
+ "ContentType",
"DeletionRecoveryLevel",
]
__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/_enums.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/_enums.py
index 86e57e60f4a4..e827cc2a01d9 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/_enums.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/_enums.py
@@ -10,6 +10,15 @@
from azure.core import CaseInsensitiveEnumMeta
+class ContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """The media type (MIME type)."""
+
+ PFX = "application/x-pkcs12"
+ """The PKCS#12 file format."""
+ PEM = "application/x-pem-file"
+ """The PEM file format."""
+
+
class DeletionRecoveryLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Reflects the deletion recovery level currently in effect for secrets in the current vault. If
it contains 'Purgeable', the secret can be permanently deleted by a privileged user; otherwise,
@@ -20,22 +29,22 @@ class DeletionRecoveryLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Denotes a vault state in which deletion is an irreversible operation, without the possibility
for recovery. This level corresponds to no protection being available against a Delete
operation; the data is irretrievably lost upon accepting a Delete operation at the entity level
- or higher (vault, resource group, subscription etc.)"""
+ or higher (vault, resource group, subscription etc.)."""
RECOVERABLE_PURGEABLE = "Recoverable+Purgeable"
"""Denotes a vault state in which deletion is recoverable, and which also permits immediate and
permanent deletion (i.e. purge). This level guarantees the recoverability of the deleted entity
during the retention interval (90 days), unless a Purge operation is requested, or the
- subscription is cancelled. System wil permanently delete it after 90 days, if not recovered"""
+ subscription is cancelled. System will permanently delete it after 90 days, if not recovered."""
RECOVERABLE = "Recoverable"
"""Denotes a vault state in which deletion is recoverable without the possibility for immediate
and permanent deletion (i.e. purge). This level guarantees the recoverability of the deleted
entity during the retention interval (90 days) and while the subscription is still available.
- System wil permanently delete it after 90 days, if not recovered"""
+ System will permanently delete it after 90 days, if not recovered."""
RECOVERABLE_PROTECTED_SUBSCRIPTION = "Recoverable+ProtectedSubscription"
"""Denotes a vault and subscription state in which deletion is recoverable within retention
interval (90 days), immediate and permanent deletion (i.e. purge) is not permitted, and in
which the subscription itself cannot be permanently canceled. System wil permanently delete it
- after 90 days, if not recovered"""
+ after 90 days, if not recovered."""
CUSTOMIZED_RECOVERABLE_PURGEABLE = "CustomizedRecoverable+Purgeable"
"""Denotes a vault state in which deletion is recoverable, and which also permits immediate and
permanent deletion (i.e. purge when 7 <= SoftDeleteRetentionInDays < 90). This level guarantees
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/_models.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/_models.py
index bc241ff42cf9..59ab3d0b7a52 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/_models.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/_models.py
@@ -8,7 +8,7 @@
# pylint: disable=useless-super-delegation
import datetime
-from typing import Any, Dict, Mapping, Optional, TYPE_CHECKING, Union, overload
+from typing import Any, Mapping, Optional, TYPE_CHECKING, Union, overload
from .._utils.model_base import Model as _Model, rest_field
@@ -47,6 +47,10 @@ class DeletedSecretBundle(_Model):
:ivar managed: True if the secret's lifetime is managed by key vault. If this is a secret
backing a certificate, then managed will be true.
:vartype managed: bool
+ :ivar previous_version: The version of the previous certificate, if applicable. Applies only to
+ certificates created after June 1, 2025. Certificates created before this date are not
+ retroactively updated.
+ :vartype previous_version: str
:ivar recovery_id: The url of the recovery object, used to identify and recover the deleted
secret.
:vartype recovery_id: str
@@ -68,7 +72,7 @@ class DeletedSecretBundle(_Model):
visibility=["read", "create", "update", "delete", "query"]
)
"""The secret management attributes."""
- tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
"""Application specific metadata in the form of key-value pairs."""
kid: Optional[str] = rest_field(visibility=["read"])
"""If this is a secret backing a KV certificate, then this field specifies the corresponding key
@@ -76,6 +80,11 @@ class DeletedSecretBundle(_Model):
managed: Optional[bool] = rest_field(visibility=["read"])
"""True if the secret's lifetime is managed by key vault. If this is a secret backing a
certificate, then managed will be true."""
+ previous_version: Optional[str] = rest_field(
+ name="previousVersion", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The version of the previous certificate, if applicable. Applies only to certificates created
+ after June 1, 2025. Certificates created before this date are not retroactively updated."""
recovery_id: Optional[str] = rest_field(
name="recoveryId", visibility=["read", "create", "update", "delete", "query"]
)
@@ -97,7 +106,8 @@ def __init__(
id: Optional[str] = None, # pylint: disable=redefined-builtin
content_type: Optional[str] = None,
attributes: Optional["_models.SecretAttributes"] = None,
- tags: Optional[Dict[str, str]] = None,
+ tags: Optional[dict[str, str]] = None,
+ previous_version: Optional[str] = None,
recovery_id: Optional[str] = None,
) -> None: ...
@@ -141,7 +151,7 @@ class DeletedSecretItem(_Model):
visibility=["read", "create", "update", "delete", "query"]
)
"""The secret management attributes."""
- tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
"""Application specific metadata in the form of key-value pairs."""
content_type: Optional[str] = rest_field(
name="contentType", visibility=["read", "create", "update", "delete", "query"]
@@ -169,7 +179,7 @@ def __init__(
*,
id: Optional[str] = None, # pylint: disable=redefined-builtin
attributes: Optional["_models.SecretAttributes"] = None,
- tags: Optional[Dict[str, str]] = None,
+ tags: Optional[dict[str, str]] = None,
content_type: Optional[str] = None,
recovery_id: Optional[str] = None,
) -> None: ...
@@ -306,6 +316,10 @@ class SecretBundle(_Model):
:ivar managed: True if the secret's lifetime is managed by key vault. If this is a secret
backing a certificate, then managed will be true.
:vartype managed: bool
+ :ivar previous_version: The version of the previous certificate, if applicable. Applies only to
+ certificates created after June 1, 2025. Certificates created before this date are not
+ retroactively updated.
+ :vartype previous_version: str
"""
value: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
@@ -320,7 +334,7 @@ class SecretBundle(_Model):
visibility=["read", "create", "update", "delete", "query"]
)
"""The secret management attributes."""
- tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
"""Application specific metadata in the form of key-value pairs."""
kid: Optional[str] = rest_field(visibility=["read"])
"""If this is a secret backing a KV certificate, then this field specifies the corresponding key
@@ -328,6 +342,11 @@ class SecretBundle(_Model):
managed: Optional[bool] = rest_field(visibility=["read"])
"""True if the secret's lifetime is managed by key vault. If this is a secret backing a
certificate, then managed will be true."""
+ previous_version: Optional[str] = rest_field(
+ name="previousVersion", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The version of the previous certificate, if applicable. Applies only to certificates created
+ after June 1, 2025. Certificates created before this date are not retroactively updated."""
@overload
def __init__(
@@ -337,7 +356,8 @@ def __init__(
id: Optional[str] = None, # pylint: disable=redefined-builtin
content_type: Optional[str] = None,
attributes: Optional["_models.SecretAttributes"] = None,
- tags: Optional[Dict[str, str]] = None,
+ tags: Optional[dict[str, str]] = None,
+ previous_version: Optional[str] = None,
) -> None: ...
@overload
@@ -373,7 +393,7 @@ class SecretItem(_Model):
visibility=["read", "create", "update", "delete", "query"]
)
"""The secret management attributes."""
- tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
"""Application specific metadata in the form of key-value pairs."""
content_type: Optional[str] = rest_field(
name="contentType", visibility=["read", "create", "update", "delete", "query"]
@@ -389,7 +409,7 @@ def __init__(
*,
id: Optional[str] = None, # pylint: disable=redefined-builtin
attributes: Optional["_models.SecretAttributes"] = None,
- tags: Optional[Dict[str, str]] = None,
+ tags: Optional[dict[str, str]] = None,
content_type: Optional[str] = None,
) -> None: ...
@@ -449,7 +469,7 @@ class SecretSetParameters(_Model):
value: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
"""The value of the secret. Required."""
- tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
"""Application specific metadata in the form of key-value pairs."""
content_type: Optional[str] = rest_field(
name="contentType", visibility=["read", "create", "update", "delete", "query"]
@@ -465,7 +485,7 @@ def __init__(
self,
*,
value: str,
- tags: Optional[Dict[str, str]] = None,
+ tags: Optional[dict[str, str]] = None,
content_type: Optional[str] = None,
secret_attributes: Optional["_models.SecretAttributes"] = None,
) -> None: ...
@@ -500,7 +520,7 @@ class SecretUpdateParameters(_Model):
name="attributes", visibility=["read", "create", "update", "delete", "query"]
)
"""The secret management attributes."""
- tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
"""Application specific metadata in the form of key-value pairs."""
@overload
@@ -509,7 +529,7 @@ def __init__(
*,
content_type: Optional[str] = None,
secret_attributes: Optional["_models.SecretAttributes"] = None,
- tags: Optional[Dict[str, str]] = None,
+ tags: Optional[dict[str, str]] = None,
) -> None: ...
@overload
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/_patch.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/_patch.py
index f7dd32510333..87676c65a8f0 100644
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/_patch.py
+++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/models/_patch.py
@@ -1,14 +1,15 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
"""Customize generated code here.
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
-from typing import List
-__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+__all__: list[str] = [] # Add all objects you want publicly available to users at this package level
def patch_sdk():
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_models.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_models.py
deleted file mode 100644
index 73ec0ad5f609..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_models.py
+++ /dev/null
@@ -1,387 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-from datetime import datetime
-
-from typing import Any, Dict, Optional, Union
-
-from ._generated import models as _models
-from ._shared import parse_key_vault_id
-
-
-class SecretProperties(object):
- """A secret's ID and attributes."""
-
- def __init__(self, *args: Any, **kwargs: Any) -> None:
- self._attributes: Optional[_models.SecretAttributes] = args[0] if args else kwargs.get("attributes", None)
- self._id: Optional[str] = args[1] if len(args) > 1 else kwargs.get("vault_id", None)
- self._vault_id = KeyVaultSecretIdentifier(self._id) if self._id else None
- self._content_type = kwargs.get("content_type", None)
- self._key_id = kwargs.get("key_id", None)
- self._managed = kwargs.get("managed", None)
- self._tags = kwargs.get("tags", None)
-
- def __repr__(self) -> str:
- return f""[:1024]
-
- @classmethod
- def _from_secret_bundle(
- cls, secret_bundle: Union[_models.DeletedSecretBundle, _models.SecretBundle]
- ) -> "SecretProperties":
- return cls(
- secret_bundle.attributes,
- secret_bundle.id,
- content_type=secret_bundle.content_type,
- key_id=secret_bundle.kid,
- managed=secret_bundle.managed,
- tags=secret_bundle.tags,
- )
-
- @classmethod
- def _from_secret_item(cls, secret_item: Union[_models.DeletedSecretItem, _models.SecretItem]) -> "SecretProperties":
- return cls(
- secret_item.attributes,
- secret_item.id,
- content_type=secret_item.content_type,
- managed=secret_item.managed,
- tags=secret_item.tags,
- )
-
- @property
- def content_type(self) -> Optional[str]:
- """An arbitrary string indicating the type of the secret.
-
- :returns: The content type of the secret.
- :rtype: str or None
- """
- return self._content_type
-
- @property
- def id(self) -> Optional[str]:
- """The secret's ID.
-
- :returns: The secret's ID.
- :rtype: str or None
- """
- return self._id
-
- @property
- def key_id(self) -> Optional[str]:
- """If this secret backs a certificate, this property is the identifier of the corresponding key.
-
- :returns: The ID of the key backing the certificate that's backed by this secret. If the secret isn't backing a
- certificate, this is None.
- :rtype: str or None
- """
- return self._key_id
-
- @property
- def enabled(self) -> Optional[bool]:
- """Whether the secret is enabled for use.
-
- :returns: True if the secret is enabled for use; False otherwise.
- :rtype: bool or None
- """
- return self._attributes.enabled if self._attributes else None
-
- @property
- def not_before(self) -> Optional[datetime]:
- """The time before which the secret cannot be used, in UTC.
-
- :returns: The time before which the secret cannot be used, in UTC.
- :rtype: ~datetime.datetime or None
- """
- return self._attributes.not_before if self._attributes else None
-
- @property
- def expires_on(self) -> Optional[datetime]:
- """When the secret expires, in UTC.
-
- :returns: When the secret expires, in UTC.
- :rtype: ~datetime.datetime or None
- """
- return self._attributes.expires if self._attributes else None
-
- @property
- def created_on(self) -> Optional[datetime]:
- """When the secret was created, in UTC.
-
- :returns: When the secret was created, in UTC.
- :rtype: ~datetime.datetime or None
- """
- return self._attributes.created if self._attributes else None
-
- @property
- def updated_on(self) -> Optional[datetime]:
- """When the secret was last updated, in UTC.
-
- :returns: When the secret was last updated, in UTC.
- :rtype: ~datetime.datetime or None
- """
- return self._attributes.updated if self._attributes else None
-
- @property
- def recoverable_days(self) -> Optional[int]:
- """The number of days the key is retained before being deleted from a soft-delete enabled Key Vault.
-
- :returns: The number of days the key is retained before being deleted from a soft-delete enabled Key Vault.
- :rtype: int or None
- """
- # recoverable_days was added in 7.1-preview
- if self._attributes and hasattr(self._attributes, "recoverable_days"):
- return self._attributes.recoverable_days
- return None
-
- @property
- def recovery_level(self) -> Optional[str]:
- """The vault's deletion recovery level for secrets.
-
- :returns: The vault's deletion recovery level for secrets.
- :rtype: str or None
- """
- return self._attributes.recovery_level if self._attributes else None
-
- @property
- def vault_url(self) -> Optional[str]:
- """URL of the vault containing the secret.
-
- :returns: URL of the vault containing the secret.
- :rtype: str or None
- """
- return self._vault_id.vault_url if self._vault_id else None
-
- @property
- def name(self) -> Optional[str]:
- """The secret's name.
-
- :returns: The secret's name.
- :rtype: str or None
- """
- return self._vault_id.name if self._vault_id else None
-
- @property
- def version(self) -> Optional[str]:
- """The secret's version.
-
- :returns: The secret's version.
- :rtype: str or None
- """
- return self._vault_id.version if self._vault_id else None
-
- @property
- def tags(self) -> Optional[Dict[str, str]]:
- """Application specific metadata in the form of key-value pairs.
-
- :returns: A dictionary of tags attached to this secret.
- :rtype: dict or None
- """
- return self._tags
-
- @property
- def managed(self) -> Optional[bool]:
- """Whether the secret's lifetime is managed by Key Vault. If the secret backs a certificate, this will be true.
-
- :returns: True if the secret's lifetime is managed by Key Vault; False otherwise.
- :rtype: bool or None
- """
- return self._managed
-
-
-class KeyVaultSecret(object):
- """All of a secret's properties, and its value.
-
- :param properties: The secret's properties.
- :type properties: ~azure.keyvault.secrets.SecretProperties
- :param value: The value of the secret.
- :type value: str or None
- """
-
- def __init__(self, properties: SecretProperties, value: Optional[str]) -> None:
- self._properties = properties
- self._value = value
-
- def __repr__(self) -> str:
- return f""[:1024]
-
- @classmethod
- def _from_secret_bundle(cls, secret_bundle: _models.SecretBundle) -> "KeyVaultSecret":
- return cls(
- properties=SecretProperties._from_secret_bundle(secret_bundle), # pylint: disable=protected-access
- value=secret_bundle.value,
- )
-
- @property
- def name(self) -> Optional[str]:
- """The secret's name.
-
- :returns: The secret's name.
- :rtype: str or None
- """
- return self._properties.name
-
- @property
- def id(self) -> Optional[str]:
- """The secret's ID.
-
- :returns: The secret's ID.
- :rtype: str or None
- """
- return self._properties.id
-
- @property
- def properties(self) -> SecretProperties:
- """The secret's properties.
-
- :returns: The secret's properties.
- :rtype: ~azure.keyvault.secrets.SecretProperties
- """
- return self._properties
-
- @property
- def value(self) -> Optional[str]:
- """The secret's value.
-
- :returns: The secret's value.
- :rtype: str or None
- """
- return self._value
-
-
-class KeyVaultSecretIdentifier(object):
- """Information about a KeyVaultSecret parsed from a secret ID.
-
- :param str source_id: the full original identifier of a secret
-
- :raises ValueError: if the secret ID is improperly formatted
-
- Example:
- .. literalinclude:: ../tests/test_parse_id.py
- :start-after: [START parse_key_vault_secret_id]
- :end-before: [END parse_key_vault_secret_id]
- :language: python
- :caption: Parse a secret's ID
- :dedent: 8
- """
-
- def __init__(self, source_id: str) -> None:
- self._resource_id = parse_key_vault_id(source_id)
-
- @property
- def source_id(self) -> str:
- return self._resource_id.source_id
-
- @property
- def vault_url(self) -> str:
- return self._resource_id.vault_url
-
- @property
- def name(self) -> str:
- return self._resource_id.name
-
- @property
- def version(self) -> Optional[str]:
- return self._resource_id.version
-
-
-class DeletedSecret(object):
- """A deleted secret's properties and information about its deletion.
-
- If soft-delete is enabled, returns information about its recovery as well.
-
- :param properties: The deleted secret's properties.
- :type properties: ~azure.keyvault.secrets.SecretProperties
- :param deleted_date: When the secret was deleted, in UTC.
- :type deleted_date: ~datetime.datetime or None
- :param recovery_id: An identifier used to recover the deleted secret.
- :type recovery_id: str or None
- :param scheduled_purge_date: When the secret is scheduled to be purged by Key Vault, in UTC.
- :type scheduled_purge_date: ~datetime.datetime or None
- """
-
- def __init__(
- self,
- properties: SecretProperties,
- deleted_date: Optional[datetime] = None,
- recovery_id: Optional[str] = None,
- scheduled_purge_date: Optional[datetime] = None,
- ) -> None:
- self._properties = properties
- self._deleted_date = deleted_date
- self._recovery_id = recovery_id
- self._scheduled_purge_date = scheduled_purge_date
-
- def __repr__(self) -> str:
- return f""[:1024]
-
- @classmethod
- def _from_deleted_secret_bundle(cls, deleted_secret_bundle: _models.DeletedSecretBundle) -> "DeletedSecret":
- return cls(
- properties=SecretProperties._from_secret_bundle(deleted_secret_bundle), # pylint: disable=protected-access
- deleted_date=deleted_secret_bundle.deleted_date,
- recovery_id=deleted_secret_bundle.recovery_id,
- scheduled_purge_date=deleted_secret_bundle.scheduled_purge_date,
- )
-
- @classmethod
- def _from_deleted_secret_item(cls, deleted_secret_item: _models.DeletedSecretItem) -> "DeletedSecret":
- return cls(
- properties=SecretProperties._from_secret_item(deleted_secret_item), # pylint: disable=protected-access
- deleted_date=deleted_secret_item.deleted_date,
- recovery_id=deleted_secret_item.recovery_id,
- scheduled_purge_date=deleted_secret_item.scheduled_purge_date,
- )
-
- @property
- def name(self) -> Optional[str]:
- """The secret's name.
-
- :returns: The secret's name.
- :rtype: str or None
- """
- return self._properties.name
-
- @property
- def id(self) -> Optional[str]:
- """The secret's ID.
-
- :returns: The secret's ID.
- :rtype: str or None
- """
- return self._properties.id
-
- @property
- def properties(self) -> SecretProperties:
- """The properties of the deleted secret.
-
- :returns: The properties of the deleted secret.
- :rtype: ~azure.keyvault.secrets.SecretProperties
- """
- return self._properties
-
- @property
- def deleted_date(self) -> Optional[datetime]:
- """When the secret was deleted, in UTC.
-
- :returns: When the secret was deleted, in UTC.
- :rtype: ~datetime.datetime or None
- """
- return self._deleted_date
-
- @property
- def recovery_id(self) -> Optional[str]:
- """An identifier used to recover the deleted secret.
-
- :returns: An identifier used to recover the deleted secret.
- :rtype: str or None
- """
- return self._recovery_id
-
- @property
- def scheduled_purge_date(self) -> Optional[datetime]:
- """When the secret is scheduled to be purged by Key Vault, in UTC.
-
- :returns: When the secret is scheduled to be purged by Key Vault, in UTC.
- :rtype: ~datetime.datetime or None
- """
- return self._scheduled_purge_date
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_sdk_moniker.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_sdk_moniker.py
deleted file mode 100644
index 43fd4b4a87ac..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_sdk_moniker.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-from ._version import VERSION
-
-SDK_MONIKER = f"keyvault-secrets/{VERSION}"
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/__init__.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/__init__.py
deleted file mode 100644
index cb088e31ad23..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/__init__.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-from typing import Optional
-from urllib import parse
-
-from .challenge_auth_policy import ChallengeAuthPolicy
-from .client_base import KeyVaultClientBase
-from .http_challenge import HttpChallenge
-from . import http_challenge_cache
-
-HttpChallengeCache = http_challenge_cache # to avoid aliasing pylint error (C4745)
-
-
-__all__ = [
- "ChallengeAuthPolicy",
- "HttpChallenge",
- "HttpChallengeCache",
- "KeyVaultClientBase",
-]
-
-
-class KeyVaultResourceId:
- """Represents a Key Vault identifier and its parsed contents.
-
- :param str source_id: The complete identifier received from Key Vault
- :param str vault_url: The vault URL
- :param str name: The name extracted from the ID
- :param str version: The version extracted from the ID
- """
-
- def __init__(
- self,
- source_id: str,
- vault_url: str,
- name: str,
- version: "Optional[str]" = None,
- ) -> None:
- self.source_id = source_id
- self.vault_url = vault_url
- self.name = name
- self.version = version
-
-
-def parse_key_vault_id(source_id: str) -> KeyVaultResourceId:
- try:
- parsed_uri = parse.urlparse(source_id)
- except Exception as exc:
- raise ValueError(f"'{source_id}' is not a valid ID") from exc
- if not (parsed_uri.scheme and parsed_uri.hostname):
- raise ValueError(f"'{source_id}' is not a valid ID")
-
- path = list(filter(None, parsed_uri.path.split("/")))
-
- if len(path) < 2 or len(path) > 3:
- raise ValueError(f"'{source_id}' is not a valid ID")
-
- vault_url = f"{parsed_uri.scheme}://{parsed_uri.hostname}"
- if parsed_uri.port:
- vault_url += f":{parsed_uri.port}"
-
- return KeyVaultResourceId(
- source_id=source_id,
- vault_url=vault_url,
- name=path[1],
- version=path[2] if len(path) == 3 else None,
- )
-
-
-try:
- # pylint:disable=unused-import
- from .async_challenge_auth_policy import AsyncChallengeAuthPolicy
- from .async_client_base import AsyncKeyVaultClientBase
-
- __all__.extend(["AsyncChallengeAuthPolicy", "AsyncKeyVaultClientBase"])
-except (SyntaxError, ImportError):
- pass
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/_polling.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/_polling.py
deleted file mode 100644
index ff0c398bba6d..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/_polling.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-import threading
-import uuid
-from typing import Any, Callable, cast, Optional
-
-from azure.core.exceptions import ResourceNotFoundError, HttpResponseError
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpTransport
-from azure.core.polling import PollingMethod, LROPoller, NoPolling
-
-from azure.core.tracing.decorator import distributed_trace
-from azure.core.tracing.common import with_current_context
-
-
-class KeyVaultOperationPoller(LROPoller):
- """Poller for long running operations where calling result() doesn't wait for operation to complete.
-
- :param polling_method: The poller's polling method.
- :type polling_method: ~azure.core.polling.PollingMethod
- """
-
- def __init__(self, polling_method: PollingMethod) -> None:
- super(KeyVaultOperationPoller, self).__init__(None, None, lambda *_: None, NoPolling())
- self._polling_method = polling_method
-
- # pylint: disable=arguments-differ
- def result(self) -> "Any": # type: ignore
- """Returns a representation of the final resource without waiting for the operation to complete.
-
- :returns: The deserialized resource of the long running operation
- :rtype: Any
-
- :raises ~azure.core.exceptions.HttpResponseError: Server problem with the query.
- """
- return self._polling_method.resource()
-
- @distributed_trace
- def wait(self, timeout: Optional[float] = None) -> None:
- """Wait on the long running operation for a number of seconds.
-
- You can check if this call has ended with timeout with the "done()" method.
-
- :param float timeout: Period of time to wait for the long running operation to complete (in seconds).
-
- :raises ~azure.core.exceptions.HttpResponseError: Server problem with the query.
- """
-
- if not self._polling_method.finished():
- self._done = threading.Event()
- self._thread = threading.Thread(
- target=with_current_context(self._start), name=f"KeyVaultOperationPoller({uuid.uuid4()})"
- )
- self._thread.daemon = True
- self._thread.start()
-
- if self._thread is None:
- return
- self._thread.join(timeout=timeout)
- try:
- # Let's handle possible None in forgiveness here
- raise self._exception # type: ignore
- except TypeError: # Was None
- pass
-
-
-class DeleteRecoverPollingMethod(PollingMethod):
- """Poller for deleting resources, and recovering deleted resources, in vaults with soft-delete enabled.
-
- This works by polling for the existence of the deleted or recovered resource. When a resource is deleted, Key Vault
- immediately removes it from its collection. However, the resource will not immediately appear in the deleted
- collection. Key Vault will therefore respond 404 to GET requests for the deleted resource; when it responds 2xx,
- the resource exists in the deleted collection i.e. its deletion is complete.
-
- Similarly, while recovering a deleted resource, Key Vault will respond 404 to GET requests for the non-deleted
- resource; when it responds 2xx, the resource exists in the non-deleted collection, i.e. its recovery is complete.
-
- :param pipeline_response: The operation's original pipeline response.
- :type pipeline_response: PipelineResponse
- :param command: A callable to invoke when polling.
- :type command: Callable
- :param final_resource: The final resource returned by the polling operation.
- :type final_resource: Any
- :param bool finished: Whether or not the polling operation is completed.
- :param int interval: The polling interval, in seconds.
- """
-
- def __init__(
- self,
- pipeline_response: PipelineResponse,
- command: Callable,
- final_resource: Any,
- finished: bool,
- interval: int = 2,
- ) -> None:
- self._pipeline_response = pipeline_response
- self._command = command
- self._resource = final_resource
- self._polling_interval = interval
- self._finished = finished
-
- def _update_status(self) -> None:
- try:
- self._command()
- self._finished = True
- except ResourceNotFoundError:
- pass
- except HttpResponseError as e:
- # If we are polling on get_deleted_* and we don't have get permissions, we will get
- # ResourceNotFoundError until the resource is recovered, at which point we'll get a 403.
- if e.status_code == 403:
- self._finished = True
- else:
- raise
-
- def initialize(self, client: Any, initial_response: Any, deserialization_callback: Callable) -> None:
- pass
-
- def run(self) -> None:
- while not self.finished():
- self._update_status()
- if not self.finished():
- # We should always ask the client's transport to sleep, instead of sleeping directly
- transport: HttpTransport = cast(HttpTransport, self._pipeline_response.context.transport)
- transport.sleep(self._polling_interval)
-
- def finished(self) -> bool:
- return self._finished
-
- def resource(self) -> Any:
- return self._resource
-
- def status(self) -> str:
- return "finished" if self._finished else "polling"
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/_polling_async.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/_polling_async.py
deleted file mode 100644
index 16168229af08..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/_polling_async.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-from typing import Any, Callable, cast
-
-from azure.core.exceptions import ResourceNotFoundError, HttpResponseError
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpTransport
-from azure.core.polling import AsyncPollingMethod
-
-
-class AsyncDeleteRecoverPollingMethod(AsyncPollingMethod):
- """Poller for deleting resources, and recovering deleted resources, in vaults with soft-delete enabled.
-
- This works by polling for the existence of the deleted or recovered resource. When a resource is deleted, Key Vault
- immediately removes it from its collection. However, the resource will not immediately appear in the deleted
- collection. Key Vault will therefore respond 404 to GET requests for the deleted resource; when it responds 2xx,
- the resource exists in the deleted collection i.e. its deletion is complete.
-
- Similarly, while recovering a deleted resource, Key Vault will respond 404 to GET requests for the non-deleted
- resource; when it responds 2xx, the resource exists in the non-deleted collection, i.e. its recovery is complete.
-
- :param pipeline_response: The operation's original pipeline response.
- :type pipeline_response: PipelineResponse
- :param command: An awaitable to invoke when polling.
- :type command: Callable
- :param final_resource: The final resource returned by the polling operation.
- :type final_resource: Any
- :param bool finished: Whether or not the polling operation is completed.
- :param int interval: The polling interval, in seconds.
- """
-
- def __init__(
- self,
- pipeline_response: PipelineResponse,
- command: Callable,
- final_resource: Any,
- finished: bool,
- interval: int = 2,
- ) -> None:
- self._pipeline_response = pipeline_response
- self._command = command
- self._resource = final_resource
- self._polling_interval = interval
- self._finished = finished
-
- def initialize(self, client, initial_response, deserialization_callback):
- pass
-
- async def _update_status(self) -> None:
- try:
- await self._command()
- self._finished = True
- except ResourceNotFoundError:
- pass
- except HttpResponseError as e:
- # If we are polling on get_deleted_* and we don't have get permissions, we will get
- # ResourceNotFoundError until the resource is recovered, at which point we'll get a 403.
- if e.status_code == 403:
- self._finished = True
- else:
- raise
-
- async def run(self) -> None:
- while not self.finished():
- await self._update_status()
- if not self.finished():
- # We should always ask the client's transport to sleep, instead of sleeping directly
- transport: AsyncHttpTransport = cast(AsyncHttpTransport, self._pipeline_response.context.transport)
- await transport.sleep(self._polling_interval)
-
- def finished(self) -> bool:
- return self._finished
-
- def resource(self) -> Any:
- return self._resource
-
- def status(self) -> str:
- return "finished" if self._finished else "polling"
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/async_challenge_auth_policy.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/async_challenge_auth_policy.py
deleted file mode 100644
index 0f84607e3ccd..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/async_challenge_auth_policy.py
+++ /dev/null
@@ -1,262 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-"""Policy implementing Key Vault's challenge authentication protocol.
-
-Normally the protocol is only used for the client's first service request, upon which:
-1. The challenge authentication policy sends a copy of the request, without authorization or content.
-2. Key Vault responds 401 with a header (the 'challenge') detailing how the client should authenticate such a request.
-3. The policy authenticates according to the challenge and sends the original request with authorization.
-
-The policy caches the challenge and thus knows how to authenticate future requests. However, authentication
-requirements can change. For example, a vault may move to a new tenant. In such a case the policy will attempt the
-protocol again.
-"""
-
-from copy import deepcopy
-import sys
-import time
-from typing import Any, Callable, cast, Optional, overload, TypeVar, Union
-from urllib.parse import urlparse
-
-from typing_extensions import ParamSpec
-
-from azure.core.credentials import AccessToken, AccessTokenInfo, TokenRequestOptions
-from azure.core.credentials_async import AsyncSupportsTokenInfo, AsyncTokenCredential, AsyncTokenProvider
-from azure.core.pipeline import PipelineRequest, PipelineResponse
-from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy
-from azure.core.rest import AsyncHttpResponse, HttpRequest
-
-from .http_challenge import HttpChallenge
-from . import http_challenge_cache as ChallengeCache
-from .challenge_auth_policy import _enforce_tls, _has_claims, _update_challenge
-
-if sys.version_info < (3, 9):
- from typing import Awaitable
-else:
- from collections.abc import Awaitable
-
-
-P = ParamSpec("P")
-T = TypeVar("T")
-
-
-@overload
-async def await_result(func: Callable[P, Awaitable[T]], *args: P.args, **kwargs: P.kwargs) -> T: ...
-
-
-@overload
-async def await_result(func: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> T: ...
-
-
-async def await_result(func: Callable[P, Union[T, Awaitable[T]]], *args: P.args, **kwargs: P.kwargs) -> T:
- """If func returns an awaitable, await it.
-
- :param func: The function to run.
- :type func: callable
- :param args: The positional arguments to pass to the function.
- :type args: list
- :rtype: any
- :return: The result of the function
- """
- result = func(*args, **kwargs)
- if isinstance(result, Awaitable):
- return await result
- return result
-
-
-
-class AsyncChallengeAuthPolicy(AsyncBearerTokenCredentialPolicy):
- """Policy for handling HTTP authentication challenges.
-
- :param credential: An object which can provide an access token for the vault, such as a credential from
- :mod:`azure.identity.aio`
- :type credential: ~azure.core.credentials_async.AsyncTokenProvider
- """
-
- def __init__(self, credential: AsyncTokenProvider, *scopes: str, **kwargs: Any) -> None:
- # Pass `enable_cae` so `enable_cae=True` is always passed through self.authorize_request
- super().__init__(credential, *scopes, enable_cae=True, **kwargs)
- self._credential: AsyncTokenProvider = credential
- self._token: Optional[Union["AccessToken", "AccessTokenInfo"]] = None
- self._verify_challenge_resource = kwargs.pop("verify_challenge_resource", True)
- self._request_copy: Optional[HttpRequest] = None
-
- async def send(
- self, request: PipelineRequest[HttpRequest]
- ) -> PipelineResponse[HttpRequest, AsyncHttpResponse]:
- """Authorize request with a bearer token and send it to the next policy.
-
- We implement this method to account for the valid scenario where a Key Vault authentication challenge is
- immediately followed by a CAE claims challenge. The base class's implementation would return the second 401 to
- the caller, but we should handle that second challenge as well (and only return any third 401 response).
-
- :param request: The pipeline request object
- :type request: ~azure.core.pipeline.PipelineRequest
- :return: The pipeline response object
- :rtype: ~azure.core.pipeline.PipelineResponse
- """
- await await_result(self.on_request, request)
- response: PipelineResponse[HttpRequest, AsyncHttpResponse]
- try:
- response = await self.next.send(request)
- except Exception: # pylint:disable=broad-except
- await await_result(self.on_exception, request)
- raise
- await await_result(self.on_response, request, response)
-
- if response.http_response.status_code == 401:
- return await self.handle_challenge_flow(request, response)
- return response
-
- async def handle_challenge_flow(
- self,
- request: PipelineRequest[HttpRequest],
- response: PipelineResponse[HttpRequest, AsyncHttpResponse],
- consecutive_challenge: bool = False,
- ) -> PipelineResponse[HttpRequest, AsyncHttpResponse]:
- """Handle the challenge flow of Key Vault and CAE authentication.
-
- :param request: The pipeline request object
- :type request: ~azure.core.pipeline.PipelineRequest
- :param response: The pipeline response object
- :type response: ~azure.core.pipeline.PipelineResponse
- :param bool consecutive_challenge: Whether the challenge is arriving immediately after another challenge.
- Consecutive challenges can only be valid if a Key Vault challenge is followed by a CAE claims challenge.
- True if the preceding challenge was a Key Vault challenge; False otherwise.
-
- :return: The pipeline response object
- :rtype: ~azure.core.pipeline.PipelineResponse
- """
- self._token = None # any cached token is invalid
- if "WWW-Authenticate" in response.http_response.headers:
- # If the previous challenge was a KV challenge and this one is too, return the 401
- claims_challenge = _has_claims(response.http_response.headers["WWW-Authenticate"])
- if consecutive_challenge and not claims_challenge:
- return response
-
- request_authorized = await self.on_challenge(request, response)
- if request_authorized:
- # if we receive a challenge response, we retrieve a new token
- # which matches the new target. In this case, we don't want to remove
- # token from the request so clear the 'insecure_domain_change' tag
- request.context.options.pop("insecure_domain_change", False)
- try:
- response = await self.next.send(request)
- except Exception: # pylint:disable=broad-except
- await await_result(self.on_exception, request)
- raise
-
- # If consecutive_challenge == True, this could be a third consecutive 401
- if response.http_response.status_code == 401 and not consecutive_challenge:
- # If the previous challenge wasn't from CAE, we can try this function one more time
- if not claims_challenge:
- return await self.handle_challenge_flow(request, response, consecutive_challenge=True)
- await await_result(self.on_response, request, response)
- return response
-
-
- async def on_request(self, request: PipelineRequest) -> None:
- _enforce_tls(request)
- challenge = ChallengeCache.get_challenge_for_url(request.http_request.url)
- if challenge:
- # Note that if the vault has moved to a new tenant since our last request for it, this request will fail.
- if self._need_new_token():
- # azure-identity credentials require an AADv2 scope but the challenge may specify an AADv1 resource
- scope = challenge.get_scope() or challenge.get_resource() + "/.default"
- await self._request_kv_token(scope, challenge)
-
- bearer_token = cast(Union[AccessToken, AccessTokenInfo], self._token).token
- request.http_request.headers["Authorization"] = f"Bearer {bearer_token}"
- return
-
- # else: discover authentication information by eliciting a challenge from Key Vault. Remove any request data,
- # saving it for later. Key Vault will reject the request as unauthorized and respond with a challenge.
- # on_challenge will parse that challenge, use the original request including the body, authorize the
- # request, and tell super to send it again.
- if request.http_request.content:
- self._request_copy = request.http_request
- bodiless_request = HttpRequest(
- method=request.http_request.method,
- url=request.http_request.url,
- headers=deepcopy(request.http_request.headers),
- )
- bodiless_request.headers["Content-Length"] = "0"
- request.http_request = bodiless_request
-
- async def on_challenge(self, request: PipelineRequest, response: PipelineResponse) -> bool:
- try:
- # CAE challenges may not include a scope or tenant; cache from the previous challenge to use if necessary
- old_scope: Optional[str] = None
- old_tenant: Optional[str] = None
- cached_challenge = ChallengeCache.get_challenge_for_url(request.http_request.url)
- if cached_challenge:
- old_scope = cached_challenge.get_scope() or cached_challenge.get_resource() + "/.default"
- old_tenant = cached_challenge.tenant_id
-
- challenge = _update_challenge(request, response)
- # CAE challenges may not include a scope or tenant; use the previous challenge's values if necessary
- if challenge.claims and old_scope:
- challenge._parameters["scope"] = old_scope # pylint:disable=protected-access
- challenge.tenant_id = old_tenant
- # azure-identity credentials require an AADv2 scope but the challenge may specify an AADv1 resource
- scope = challenge.get_scope() or challenge.get_resource() + "/.default"
- except ValueError:
- return False
-
- if self._verify_challenge_resource:
- resource_domain = urlparse(scope).netloc
- if not resource_domain:
- raise ValueError(f"The challenge contains invalid scope '{scope}'.")
-
- request_domain = urlparse(request.http_request.url).netloc
- if not request_domain.lower().endswith(f".{resource_domain.lower()}"):
- raise ValueError(
- f"The challenge resource '{resource_domain}' does not match the requested domain. Pass "
- "`verify_challenge_resource=False` to your client's constructor to disable this verification. "
- "See https://aka.ms/azsdk/blog/vault-uri for more information."
- )
-
- # If we had created a request copy in on_request, use it now to send along the original body content
- if self._request_copy:
- request.http_request = self._request_copy
-
- # The tenant parsed from AD FS challenges is "adfs"; we don't actually need a tenant for AD FS authentication
- # For AD FS we skip cross-tenant authentication per https://github.com/Azure/azure-sdk-for-python/issues/28648
- if challenge.tenant_id and challenge.tenant_id.lower().endswith("adfs"):
- await self.authorize_request(request, scope, claims=challenge.claims)
- else:
- await self.authorize_request(
- request, scope, claims=challenge.claims, tenant_id=challenge.tenant_id
- )
-
- return True
-
- def _need_new_token(self) -> bool:
- now = time.time()
- refresh_on = getattr(self._token, "refresh_on", None)
- return not self._token or (refresh_on and refresh_on <= now) or self._token.expires_on - now < 300
-
- async def _request_kv_token(self, scope: str, challenge: HttpChallenge) -> None:
- """Implementation of BearerTokenCredentialPolicy's _request_token method, but specific to Key Vault.
-
- :param str scope: The scope for which to request a token.
- :param challenge: The challenge for the request being made.
- :type challenge: HttpChallenge
- """
- # Exclude tenant for AD FS authentication
- exclude_tenant = challenge.tenant_id and challenge.tenant_id.lower().endswith("adfs")
- # The AsyncSupportsTokenInfo protocol needs TokenRequestOptions for token requests instead of kwargs
- if hasattr(self._credential, "get_token_info"):
- options: TokenRequestOptions = {"enable_cae": True}
- if challenge.tenant_id and not exclude_tenant:
- options["tenant_id"] = challenge.tenant_id
- self._token = await cast(AsyncSupportsTokenInfo, self._credential).get_token_info(scope, options=options)
- else:
- if exclude_tenant:
- self._token = await self._credential.get_token(scope, enable_cae=True)
- else:
- self._token = await cast(AsyncTokenCredential, self._credential).get_token(
- scope, tenant_id=challenge.tenant_id, enable_cae=True
- )
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/async_client_base.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/async_client_base.py
deleted file mode 100644
index 3e1a2bec8fc4..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/async_client_base.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-import sys
-from typing import Any
-
-from azure.core.credentials_async import AsyncTokenCredential
-from azure.core.pipeline.policies import HttpLoggingPolicy
-from azure.core.rest import AsyncHttpResponse, HttpRequest
-from azure.core.tracing.decorator_async import distributed_trace_async
-
-from . import AsyncChallengeAuthPolicy
-from .client_base import ApiVersion, DEFAULT_VERSION, _format_api_version, _SERIALIZER
-from .._sdk_moniker import SDK_MONIKER
-from .._generated.aio import KeyVaultClient as _KeyVaultClient
-from .._generated import models as _models
-
-if sys.version_info < (3, 9):
- from typing import Awaitable
-else:
- from collections.abc import Awaitable
-
-
-class AsyncKeyVaultClientBase(object):
- # pylint:disable=protected-access
- def __init__(self, vault_url: str, credential: AsyncTokenCredential, **kwargs: Any) -> None:
- if not credential:
- raise ValueError(
- "credential should be an object supporting the AsyncTokenCredential protocol, "
- "such as a credential from azure-identity"
- )
- if not vault_url:
- raise ValueError("vault_url must be the URL of an Azure Key Vault")
-
- try:
- self.api_version = kwargs.pop("api_version", DEFAULT_VERSION)
- # If API version was provided as an enum value, need to make a plain string for 3.11 compatibility
- if hasattr(self.api_version, "value"):
- self.api_version = self.api_version.value
- self._vault_url = vault_url.strip(" /")
-
- client = kwargs.get("generated_client")
- if client:
- # caller provided a configured client -> only models left to initialize
- self._client = client
- models = kwargs.get("generated_models")
- self._models = models or _models
- return
-
- http_logging_policy = HttpLoggingPolicy(**kwargs)
- http_logging_policy.allowed_header_names.update(
- {"x-ms-keyvault-network-info", "x-ms-keyvault-region", "x-ms-keyvault-service-version"}
- )
-
- verify_challenge = kwargs.pop("verify_challenge_resource", True)
- self._client = _KeyVaultClient(
- credential=credential,
- vault_base_url=self._vault_url,
- api_version=self.api_version,
- authentication_policy=AsyncChallengeAuthPolicy(credential, verify_challenge_resource=verify_challenge),
- sdk_moniker=SDK_MONIKER,
- http_logging_policy=http_logging_policy,
- **kwargs,
- )
- self._models = _models
- except ValueError as exc:
- # Ignore pyright error that comes from not identifying ApiVersion as an iterable enum
- raise NotImplementedError(
- f"This package doesn't support API version '{self.api_version}'. "
- + "Supported versions: "
- + f"{', '.join(v.value for v in ApiVersion)}" # pyright: ignore[reportGeneralTypeIssues]
- ) from exc
-
- @property
- def vault_url(self) -> str:
- return self._vault_url
-
- async def __aenter__(self) -> "AsyncKeyVaultClientBase":
- await self._client.__aenter__()
- return self
-
- async def __aexit__(self, *args: Any) -> None:
- await self._client.__aexit__(*args)
-
- async def close(self) -> None:
- """Close sockets opened by the client.
-
- Calling this method is unnecessary when using the client as a context manager.
- """
- await self._client.close()
-
- @distributed_trace_async
- def send_request(
- self, request: HttpRequest, *, stream: bool = False, **kwargs: Any
- ) -> Awaitable[AsyncHttpResponse]:
- """Runs a network request using the client's existing pipeline.
-
- The request URL can be relative to the vault URL. The service API version used for the request is the same as
- the client's unless otherwise specified. This method does not raise if the response is an error; to raise an
- exception, call `raise_for_status()` on the returned response object. For more information about how to send
- custom requests with this method, see https://aka.ms/azsdk/dpcodegen/python/send_request.
-
- :param request: The network request you want to make.
- :type request: ~azure.core.rest.HttpRequest
-
- :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
-
- :return: The response of your network call. Does not do error handling on your response.
- :rtype: ~azure.core.rest.AsyncHttpResponse
- """
- request_copy = _format_api_version(request, self.api_version)
- path_format_arguments = {
- "vaultBaseUrl": _SERIALIZER.url("vault_base_url", self._vault_url, "str", skip_quote=True),
- }
- request_copy.url = self._client._client.format_url(request_copy.url, **path_format_arguments)
- return self._client._client.send_request(request_copy, stream=stream, **kwargs)
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/challenge_auth_policy.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/challenge_auth_policy.py
deleted file mode 100644
index eb4073d0e699..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/challenge_auth_policy.py
+++ /dev/null
@@ -1,270 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-"""Policy implementing Key Vault's challenge authentication protocol.
-
-Normally the protocol is only used for the client's first service request, upon which:
-1. The challenge authentication policy sends a copy of the request, without authorization or content.
-2. Key Vault responds 401 with a header (the 'challenge') detailing how the client should authenticate such a request.
-3. The policy authenticates according to the challenge and sends the original request with authorization.
-
-The policy caches the challenge and thus knows how to authenticate future requests. However, authentication
-requirements can change. For example, a vault may move to a new tenant. In such a case the policy will attempt the
-protocol again.
-"""
-
-from copy import deepcopy
-import time
-from typing import Any, cast, Optional, Union
-from urllib.parse import urlparse
-
-from azure.core.credentials import (
- AccessToken,
- AccessTokenInfo,
- TokenCredential,
- TokenProvider,
- TokenRequestOptions,
- SupportsTokenInfo,
-)
-from azure.core.exceptions import ServiceRequestError
-from azure.core.pipeline import PipelineRequest, PipelineResponse
-from azure.core.pipeline.policies import BearerTokenCredentialPolicy
-from azure.core.rest import HttpRequest, HttpResponse
-
-from .http_challenge import HttpChallenge
-from . import http_challenge_cache as ChallengeCache
-
-
-def _enforce_tls(request: PipelineRequest) -> None:
- if not request.http_request.url.lower().startswith("https"):
- raise ServiceRequestError(
- "Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."
- )
-
-
-def _has_claims(challenge: str) -> bool:
- """Check if a challenge header contains claims.
-
- :param challenge: The challenge header to check.
- :type challenge: str
-
- :returns: True if the challenge contains claims; False otherwise.
- :rtype: bool
- """
- # Split the challenge into its scheme and parameters, then check if any parameter contains claims
- split_challenge = challenge.strip().split(" ", 1)
- return any("claims=" in item for item in split_challenge[1].split(","))
-
-
-def _update_challenge(request: PipelineRequest, challenger: PipelineResponse) -> HttpChallenge:
- """Parse challenge from a challenge response, cache it, and return it.
-
- :param request: The pipeline request that prompted the challenge response.
- :type request: ~azure.core.pipeline.PipelineRequest
- :param challenger: The pipeline response containing the authentication challenge.
- :type challenger: ~azure.core.pipeline.PipelineResponse
-
- :returns: An HttpChallenge object representing the authentication challenge.
- :rtype: HttpChallenge
- """
-
- challenge = HttpChallenge(
- request.http_request.url,
- challenger.http_response.headers.get("WWW-Authenticate"),
- response_headers=challenger.http_response.headers,
- )
- ChallengeCache.set_challenge_for_url(request.http_request.url, challenge)
- return challenge
-
-
-class ChallengeAuthPolicy(BearerTokenCredentialPolicy):
- """Policy for handling HTTP authentication challenges.
-
- :param credential: An object which can provide an access token for the vault, such as a credential from
- :mod:`azure.identity`
- :type credential: ~azure.core.credentials.TokenProvider
- :param str scopes: Lets you specify the type of access needed.
- """
-
- def __init__(self, credential: TokenProvider, *scopes: str, **kwargs: Any) -> None:
- # Pass `enable_cae` so `enable_cae=True` is always passed through self.authorize_request
- super(ChallengeAuthPolicy, self).__init__(credential, *scopes, enable_cae=True, **kwargs)
- self._credential: TokenProvider = credential
- self._token: Optional[Union["AccessToken", "AccessTokenInfo"]] = None
- self._verify_challenge_resource = kwargs.pop("verify_challenge_resource", True)
- self._request_copy: Optional[HttpRequest] = None
-
- def send(self, request: PipelineRequest[HttpRequest]) -> PipelineResponse[HttpRequest, HttpResponse]:
- """Authorize request with a bearer token and send it to the next policy.
-
- We implement this method to account for the valid scenario where a Key Vault authentication challenge is
- immediately followed by a CAE claims challenge. The base class's implementation would return the second 401 to
- the caller, but we should handle that second challenge as well (and only return any third 401 response).
-
- :param request: The pipeline request object
- :type request: ~azure.core.pipeline.PipelineRequest
-
- :return: The pipeline response object
- :rtype: ~azure.core.pipeline.PipelineResponse
- """
- self.on_request(request)
- try:
- response = self.next.send(request)
- except Exception: # pylint:disable=broad-except
- self.on_exception(request)
- raise
-
- self.on_response(request, response)
- if response.http_response.status_code == 401:
- return self.handle_challenge_flow(request, response)
- return response
-
- def handle_challenge_flow(
- self,
- request: PipelineRequest[HttpRequest],
- response: PipelineResponse[HttpRequest, HttpResponse],
- consecutive_challenge: bool = False,
- ) -> PipelineResponse[HttpRequest, HttpResponse]:
- """Handle the challenge flow of Key Vault and CAE authentication.
-
- :param request: The pipeline request object
- :type request: ~azure.core.pipeline.PipelineRequest
- :param response: The pipeline response object
- :type response: ~azure.core.pipeline.PipelineResponse
- :param bool consecutive_challenge: Whether the challenge is arriving immediately after another challenge.
- Consecutive challenges can only be valid if a Key Vault challenge is followed by a CAE claims challenge.
- True if the preceding challenge was a Key Vault challenge; False otherwise.
-
- :return: The pipeline response object
- :rtype: ~azure.core.pipeline.PipelineResponse
- """
- self._token = None # any cached token is invalid
- if "WWW-Authenticate" in response.http_response.headers:
- # If the previous challenge was a KV challenge and this one is too, return the 401
- claims_challenge = _has_claims(response.http_response.headers["WWW-Authenticate"])
- if consecutive_challenge and not claims_challenge:
- return response
-
- request_authorized = self.on_challenge(request, response)
- if request_authorized:
- # if we receive a challenge response, we retrieve a new token
- # which matches the new target. In this case, we don't want to remove
- # token from the request so clear the 'insecure_domain_change' tag
- request.context.options.pop("insecure_domain_change", False)
- try:
- response = self.next.send(request)
- except Exception: # pylint:disable=broad-except
- self.on_exception(request)
- raise
-
- # If consecutive_challenge == True, this could be a third consecutive 401
- if response.http_response.status_code == 401 and not consecutive_challenge:
- # If the previous challenge wasn't from CAE, we can try this function one more time
- if not claims_challenge:
- return self.handle_challenge_flow(request, response, consecutive_challenge=True)
- self.on_response(request, response)
- return response
-
- def on_request(self, request: PipelineRequest) -> None:
- _enforce_tls(request)
- challenge = ChallengeCache.get_challenge_for_url(request.http_request.url)
- if challenge:
- # Note that if the vault has moved to a new tenant since our last request for it, this request will fail.
- if self._need_new_token:
- # azure-identity credentials require an AADv2 scope but the challenge may specify an AADv1 resource
- scope = challenge.get_scope() or challenge.get_resource() + "/.default"
- self._request_kv_token(scope, challenge)
-
- bearer_token = cast(Union["AccessToken", "AccessTokenInfo"], self._token).token
- request.http_request.headers["Authorization"] = f"Bearer {bearer_token}"
- return
-
- # else: discover authentication information by eliciting a challenge from Key Vault. Remove any request data,
- # saving it for later. Key Vault will reject the request as unauthorized and respond with a challenge.
- # on_challenge will parse that challenge, use the original request including the body, authorize the
- # request, and tell super to send it again.
- if request.http_request.content:
- self._request_copy = request.http_request
- bodiless_request = HttpRequest(
- method=request.http_request.method,
- url=request.http_request.url,
- headers=deepcopy(request.http_request.headers),
- )
- bodiless_request.headers["Content-Length"] = "0"
- request.http_request = bodiless_request
-
- def on_challenge(self, request: PipelineRequest, response: PipelineResponse) -> bool:
- try:
- # CAE challenges may not include a scope or tenant; cache from the previous challenge to use if necessary
- old_scope: Optional[str] = None
- old_tenant: Optional[str] = None
- cached_challenge = ChallengeCache.get_challenge_for_url(request.http_request.url)
- if cached_challenge:
- old_scope = cached_challenge.get_scope() or cached_challenge.get_resource() + "/.default"
- old_tenant = cached_challenge.tenant_id
-
- challenge = _update_challenge(request, response)
- # CAE challenges may not include a scope or tenant; use the previous challenge's values if necessary
- if challenge.claims and old_scope:
- challenge._parameters["scope"] = old_scope # pylint:disable=protected-access
- challenge.tenant_id = old_tenant
- # azure-identity credentials require an AADv2 scope but the challenge may specify an AADv1 resource
- scope = challenge.get_scope() or challenge.get_resource() + "/.default"
- except ValueError:
- return False
-
- if self._verify_challenge_resource:
- resource_domain = urlparse(scope).netloc
- if not resource_domain:
- raise ValueError(f"The challenge contains invalid scope '{scope}'.")
-
- request_domain = urlparse(request.http_request.url).netloc
- if not request_domain.lower().endswith(f".{resource_domain.lower()}"):
- raise ValueError(
- f"The challenge resource '{resource_domain}' does not match the requested domain. Pass "
- "`verify_challenge_resource=False` to your client's constructor to disable this verification. "
- "See https://aka.ms/azsdk/blog/vault-uri for more information."
- )
-
- # If we had created a request copy in on_request, use it now to send along the original body content
- if self._request_copy:
- request.http_request = self._request_copy
-
- # The tenant parsed from AD FS challenges is "adfs"; we don't actually need a tenant for AD FS authentication
- # For AD FS we skip cross-tenant authentication per https://github.com/Azure/azure-sdk-for-python/issues/28648
- if challenge.tenant_id and challenge.tenant_id.lower().endswith("adfs"):
- self.authorize_request(request, scope, claims=challenge.claims)
- else:
- self.authorize_request(request, scope, claims=challenge.claims, tenant_id=challenge.tenant_id)
-
- return True
-
- @property
- def _need_new_token(self) -> bool:
- now = time.time()
- refresh_on = getattr(self._token, "refresh_on", None)
- return not self._token or (refresh_on and refresh_on <= now) or self._token.expires_on - now < 300
-
- def _request_kv_token(self, scope: str, challenge: HttpChallenge) -> None:
- """Implementation of BearerTokenCredentialPolicy's _request_token method, but specific to Key Vault.
-
- :param str scope: The scope for which to request a token.
- :param challenge: The challenge for the request being made.
- :type challenge: HttpChallenge
- """
- # Exclude tenant for AD FS authentication
- exclude_tenant = challenge.tenant_id and challenge.tenant_id.lower().endswith("adfs")
- # The SupportsTokenInfo protocol needs TokenRequestOptions for token requests instead of kwargs
- if hasattr(self._credential, "get_token_info"):
- options: TokenRequestOptions = {"enable_cae": True}
- if challenge.tenant_id and not exclude_tenant:
- options["tenant_id"] = challenge.tenant_id
- self._token = cast(SupportsTokenInfo, self._credential).get_token_info(scope, options=options)
- else:
- if exclude_tenant:
- self._token = self._credential.get_token(scope, enable_cae=True)
- else:
- self._token = cast(TokenCredential, self._credential).get_token(
- scope, tenant_id=challenge.tenant_id, enable_cae=True
- )
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/client_base.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/client_base.py
deleted file mode 100644
index ff5d529d119f..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/client_base.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-from copy import deepcopy
-from enum import Enum
-from typing import Any
-from urllib.parse import urlparse
-
-from azure.core import CaseInsensitiveEnumMeta
-from azure.core.credentials import TokenCredential
-from azure.core.pipeline.policies import HttpLoggingPolicy
-from azure.core.rest import HttpRequest, HttpResponse
-from azure.core.tracing.decorator import distributed_trace
-
-from . import ChallengeAuthPolicy
-from .._generated import KeyVaultClient as _KeyVaultClient
-from .._generated import models as _models
-from .._generated._utils.serialization import Serializer
-from .._sdk_moniker import SDK_MONIKER
-
-
-class ApiVersion(str, Enum, metaclass=CaseInsensitiveEnumMeta):
- """Key Vault API versions supported by this package"""
-
- #: this is the default version
- V7_6 = "7.6"
- V7_5 = "7.5"
- V7_4 = "7.4"
- V7_3 = "7.3"
- V7_2 = "7.2"
- V7_1 = "7.1"
- V7_0 = "7.0"
- V2016_10_01 = "2016-10-01"
-
-
-DEFAULT_VERSION = ApiVersion.V7_6
-
-_SERIALIZER = Serializer()
-_SERIALIZER.client_side_validation = False
-
-
-def _format_api_version(request: HttpRequest, api_version: str) -> HttpRequest:
- """Returns a request copy that includes an api-version query parameter if one wasn't originally present.
-
- :param request: The HTTP request being sent.
- :type request: ~azure.core.rest.HttpRequest
- :param str api_version: The service API version that the request should include.
-
- :returns: A copy of the request that includes an api-version query parameter.
- :rtype: azure.core.rest.HttpRequest
- """
- request_copy = deepcopy(request)
- params = {"api-version": api_version} # By default, we want to use the client's API version
- query = urlparse(request_copy.url).query
-
- if query:
- request_copy.url = request_copy.url.partition("?")[0]
- existing_params = {p[0]: p[-1] for p in [p.partition("=") for p in query.split("&")]}
- params.update(existing_params) # If an api-version was provided, this will overwrite our default
-
- # Reconstruct the query parameters onto the URL
- query_params = []
- for k, v in params.items():
- query_params.append("{}={}".format(k, v))
- query = "?" + "&".join(query_params)
- request_copy.url = request_copy.url + query
- return request_copy
-
-
-class KeyVaultClientBase(object):
- # pylint:disable=protected-access
- def __init__(self, vault_url: str, credential: TokenCredential, **kwargs: Any) -> None:
- if not credential:
- raise ValueError(
- "credential should be an object supporting the TokenCredential protocol, "
- "such as a credential from azure-identity"
- )
- if not vault_url:
- raise ValueError("vault_url must be the URL of an Azure Key Vault")
-
- try:
- self.api_version = kwargs.pop("api_version", DEFAULT_VERSION)
- # If API version was provided as an enum value, need to make a plain string for 3.11 compatibility
- if hasattr(self.api_version, "value"):
- self.api_version = self.api_version.value
- self._vault_url = vault_url.strip(" /")
-
- client = kwargs.get("generated_client")
- if client:
- # caller provided a configured client -> only models left to initialize
- self._client = client
- models = kwargs.get("generated_models")
- self._models = models or _models
- return
-
- http_logging_policy = HttpLoggingPolicy(**kwargs)
- http_logging_policy.allowed_header_names.update(
- {"x-ms-keyvault-network-info", "x-ms-keyvault-region", "x-ms-keyvault-service-version"}
- )
-
- verify_challenge = kwargs.pop("verify_challenge_resource", True)
- self._client = _KeyVaultClient(
- credential=credential,
- vault_base_url=self._vault_url,
- api_version=self.api_version,
- authentication_policy=ChallengeAuthPolicy(credential, verify_challenge_resource=verify_challenge),
- sdk_moniker=SDK_MONIKER,
- http_logging_policy=http_logging_policy,
- **kwargs,
- )
- self._models = _models
- except ValueError as exc:
- # Ignore pyright error that comes from not identifying ApiVersion as an iterable enum
- raise NotImplementedError(
- f"This package doesn't support API version '{self.api_version}'. "
- + "Supported versions: "
- + f"{', '.join(v.value for v in ApiVersion)}" # pyright: ignore[reportGeneralTypeIssues]
- ) from exc
-
- @property
- def vault_url(self) -> str:
- return self._vault_url
-
- def __enter__(self) -> "KeyVaultClientBase":
- self._client.__enter__()
- return self
-
- def __exit__(self, *args: Any) -> None:
- self._client.__exit__(*args)
-
- def close(self) -> None:
- """Close sockets opened by the client.
-
- Calling this method is unnecessary when using the client as a context manager.
- """
- self._client.close()
-
- @distributed_trace
- def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse:
- """Runs a network request using the client's existing pipeline.
-
- The request URL can be relative to the vault URL. The service API version used for the request is the same as
- the client's unless otherwise specified. This method does not raise if the response is an error; to raise an
- exception, call `raise_for_status()` on the returned response object. For more information about how to send
- custom requests with this method, see https://aka.ms/azsdk/dpcodegen/python/send_request.
-
- :param request: The network request you want to make.
- :type request: ~azure.core.rest.HttpRequest
-
- :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
-
- :return: The response of your network call. Does not do error handling on your response.
- :rtype: ~azure.core.rest.HttpResponse
- """
- request_copy = _format_api_version(request, self.api_version)
- path_format_arguments = {
- "vaultBaseUrl": _SERIALIZER.url("vault_base_url", self._vault_url, "str", skip_quote=True),
- }
- request_copy.url = self._client._client.format_url(request_copy.url, **path_format_arguments)
- return self._client._client.send_request(request_copy, stream=stream, **kwargs)
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/http_challenge.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/http_challenge.py
deleted file mode 100644
index 8b14b999de78..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/http_challenge.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-import base64
-from typing import Dict, MutableMapping, Optional
-from urllib import parse
-
-
-class HttpChallenge(object):
- """An object representing the content of a Key Vault authentication challenge.
-
- :param str request_uri: The URI of the HTTP request that prompted this challenge.
- :param str challenge: The WWW-Authenticate header of the challenge response.
- :param response_headers: Optional. The headers attached to the challenge response.
- :type response_headers: MutableMapping[str, str] or None
- """
-
- def __init__(
- self, request_uri: str, challenge: str, response_headers: "Optional[MutableMapping[str, str]]" = None
- ) -> None:
- """Parses an HTTP WWW-Authentication Bearer challenge from a server.
-
- Example challenge with claims:
- Bearer authorization="https://login.windows-ppe.net/", error="invalid_token",
- error_description="User session has been revoked",
- claims="eyJhY2Nlc3NfdG9rZW4iOnsibmJmIjp7ImVzc2VudGlhbCI6dHJ1ZSwgInZhbHVlIjoiMTYwMzc0MjgwMCJ9fX0="
- """
- self.source_authority = self._validate_request_uri(request_uri)
- self.source_uri = request_uri
- self._parameters: "Dict[str, str]" = {}
-
- # get the scheme of the challenge and remove from the challenge string
- trimmed_challenge = self._validate_challenge(challenge)
- split_challenge = trimmed_challenge.split(" ", 1)
- self.scheme = split_challenge[0]
- trimmed_challenge = split_challenge[1]
-
- self.claims = None
- # split trimmed challenge into comma-separated name=value pairs. Values are expected
- # to be surrounded by quotes which are stripped here.
- for item in trimmed_challenge.split(","):
- # Special case for claims, which can contain = symbols as padding. Assume at most one claim per challenge
- if "claims=" in item:
- encoded_claims = item[item.index("=") + 1 :].strip(" \"'")
- padding_needed = -len(encoded_claims) % 4
- try:
- decoded_claims = base64.urlsafe_b64decode(encoded_claims + "=" * padding_needed).decode()
- self.claims = decoded_claims
- except Exception: # pylint:disable=broad-except
- continue
- # process name=value pairs
- else:
- comps = item.split("=")
- if len(comps) == 2:
- key = comps[0].strip(' "')
- value = comps[1].strip(' "')
- if key:
- self._parameters[key] = value
-
- # minimum set of parameters
- if not self._parameters:
- raise ValueError("Invalid challenge parameters")
-
- # must specify authorization or authorization_uri
- if "authorization" not in self._parameters and "authorization_uri" not in self._parameters:
- raise ValueError("Invalid challenge parameters")
-
- authorization_uri = self.get_authorization_server()
- # the authorization server URI should look something like https://login.windows.net/tenant-id
- raw_uri_path = str(parse.urlparse(authorization_uri).path)
- uri_path = raw_uri_path.lstrip("/")
- self.tenant_id = uri_path.split("/", maxsplit=1)[0] or None
-
- # if the response headers were supplied
- if response_headers:
- # get the message signing key and message key encryption key from the headers
- self.server_signature_key = response_headers.get("x-ms-message-signing-key", None)
- self.server_encryption_key = response_headers.get("x-ms-message-encryption-key", None)
-
- def is_bearer_challenge(self) -> bool:
- """Tests whether the HttpChallenge is a Bearer challenge.
-
- :returns: True if the challenge is a Bearer challenge; False otherwise.
- :rtype: bool
- """
- if not self.scheme:
- return False
-
- return self.scheme.lower() == "bearer"
-
- def is_pop_challenge(self) -> bool:
- """Tests whether the HttpChallenge is a proof of possession challenge.
-
- :returns: True if the challenge is a proof of possession challenge; False otherwise.
- :rtype: bool
- """
- if not self.scheme:
- return False
-
- return self.scheme.lower() == "pop"
-
- def get_value(self, key: str) -> "Optional[str]":
- return self._parameters.get(key)
-
- def get_authorization_server(self) -> str:
- """Returns the URI for the authorization server if present, otherwise an empty string.
-
- :returns: The URI for the authorization server if present, otherwise an empty string.
- :rtype: str
- """
- value = ""
- for key in ["authorization_uri", "authorization"]:
- value = self.get_value(key) or ""
- if value:
- break
- return value
-
- def get_resource(self) -> str:
- """Returns the resource if present, otherwise an empty string.
-
- :returns: The challenge resource if present, otherwise an empty string.
- :rtype: str
- """
- return self.get_value("resource") or ""
-
- def get_scope(self) -> str:
- """Returns the scope if present, otherwise an empty string.
-
- :returns: The challenge scope if present, otherwise an empty string.
- :rtype: str
- """
- return self.get_value("scope") or ""
-
- def supports_pop(self) -> bool:
- """Returns True if the challenge supports proof of possession token auth; False otherwise.
-
- :returns: True if the challenge supports proof of possession token auth; False otherwise.
- :rtype: bool
- """
- return self._parameters.get("supportspop", "").lower() == "true"
-
- def supports_message_protection(self) -> bool:
- """Returns True if the challenge vault supports message protection; False otherwise.
-
- :returns: True if the challenge vault supports message protection; False otherwise.
- :rtype: bool
- """
- return self.supports_pop() and self.server_encryption_key and self.server_signature_key # type: ignore
-
- def _validate_challenge(
- self, challenge: str
- ) -> str: # pylint:disable=bad-option-value,useless-option-value,no-self-use
- """Verifies that the challenge is a valid auth challenge and returns the key=value pairs.
-
- :param str challenge: The WWW-Authenticate header of the challenge response.
-
- :returns: The challenge key/value pairs, with whitespace removed, as a string.
- :rtype: str
- """
- if not challenge:
- raise ValueError("Challenge cannot be empty")
-
- return challenge.strip()
-
- def _validate_request_uri(
- self, uri: str
- ) -> str: # pylint:disable=bad-option-value,useless-option-value,no-self-use
- """Extracts the host authority from the given URI.
-
- :param str uri: The URI of the HTTP request that prompted the challenge.
-
- :returns: The challenge host authority.
- :rtype: str
- """
- if not uri:
- raise ValueError("request_uri cannot be empty")
-
- parsed = parse.urlparse(uri)
- if not parsed.netloc:
- raise ValueError("request_uri must be an absolute URI")
-
- if parsed.scheme.lower() not in ["http", "https"]:
- raise ValueError("request_uri must be HTTP or HTTPS")
-
- return parsed.netloc
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/http_challenge_cache.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/http_challenge_cache.py
deleted file mode 100644
index 99f32091e24b..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/http_challenge_cache.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-import threading
-from typing import Dict, Optional
-from urllib import parse
-
-from .http_challenge import HttpChallenge
-
-
-_cache: "Dict[str, HttpChallenge]" = {}
-_lock = threading.Lock()
-
-
-def get_challenge_for_url(url: str) -> "Optional[HttpChallenge]":
- """Gets the challenge for the cached URL.
-
- :param str url: the URL the challenge is cached for.
-
- :returns: The challenge for the cached request URL, or None if the request URL isn't cached.
- :rtype: HttpChallenge or None
- """
-
- if not url:
- raise ValueError("URL cannot be None")
-
- key = _get_cache_key(url)
-
- with _lock:
- return _cache.get(key.lower())
-
-
-def _get_cache_key(url: str) -> str:
- """Use the URL's netloc as cache key except when the URL specifies the default port for its scheme. In that case
- use the netloc without the port. That is to say, https://foo.bar and https://foo.bar:443 are considered equivalent.
-
- This equivalency prevents an unnecessary challenge when using Key Vault's paging API. The Key Vault client doesn't
- specify ports, but Key Vault's next page links do, so a redundant challenge would otherwise be executed when the
- client requests the next page.
-
- :param str url: The HTTP request URL.
-
- :returns: The URL's `netloc`, minus any port attached to the URL.
- :rtype: str
- """
-
- parsed = parse.urlparse(url)
- if parsed.scheme == "https" and parsed.port == 443:
- return parsed.netloc[:-4]
- return parsed.netloc
-
-
-def remove_challenge_for_url(url: str) -> None:
- """Removes the cached challenge for the specified URL.
-
- :param str url: the URL for which to remove the cached challenge
- """
- if not url:
- raise ValueError("URL cannot be empty")
-
- key = _get_cache_key(url)
- with _lock:
- del _cache[key.lower()]
-
-
-def set_challenge_for_url(url: str, challenge: "HttpChallenge") -> None:
- """Caches the challenge for the specified URL.
-
- :param str url: the URL for which to cache the challenge
- :param challenge: the challenge to cache
- :type challenge: HttpChallenge
- """
- if not url:
- raise ValueError("URL cannot be empty")
-
- if not challenge:
- raise ValueError("Challenge cannot be empty")
-
- src_url = parse.urlparse(url)
- if src_url.netloc.lower() != challenge.source_authority.lower():
- raise ValueError("Source URL and Challenge URL do not match")
-
- key = _get_cache_key(url)
- with _lock:
- _cache[key.lower()] = challenge
-
-
-def clear() -> None:
- """Clears the cache."""
-
- with _lock:
- _cache.clear()
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_version.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_version.py
deleted file mode 100644
index 81a6e1e8366d..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_version.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-
-VERSION = "4.10.1"
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/aio/__init__.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/aio/__init__.py
deleted file mode 100644
index 44967833f2df..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/aio/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-from ._client import SecretClient
-
-__all__ = ["SecretClient"]
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/aio/_client.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/aio/_client.py
deleted file mode 100644
index 20904cf11646..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/aio/_client.py
+++ /dev/null
@@ -1,452 +0,0 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-from datetime import datetime
-from typing import Any, cast, Dict, Optional
-from functools import partial
-
-from azure.core.tracing.decorator import distributed_trace
-from azure.core.tracing.decorator_async import distributed_trace_async
-from azure.core.async_paging import AsyncItemPaged
-
-from .._models import KeyVaultSecret, DeletedSecret, SecretProperties
-from .._shared import AsyncKeyVaultClientBase
-from .._shared._polling_async import AsyncDeleteRecoverPollingMethod
-
-
-class SecretClient(AsyncKeyVaultClientBase):
- """A high-level asynchronous interface for managing a vault's secrets.
-
- :param str vault_url: URL of the vault the client will access. This is also called the vault's "DNS Name".
- You should validate that this URL references a valid Key Vault resource. See https://aka.ms/azsdk/blog/vault-uri
- for details.
- :param credential: An object which can provide an access token for the vault, such as a credential from
- :mod:`azure.identity.aio`
- :type credential: ~azure.core.credentials_async.AsyncTokenCredential
-
- :keyword api_version: Version of the service API to use. Defaults to the most recent.
- :paramtype api_version: ~azure.keyvault.secrets.ApiVersion or str
- :keyword bool verify_challenge_resource: Whether to verify the authentication challenge resource matches the Key
- Vault domain. Defaults to True.
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets_async.py
- :start-after: [START create_secret_client]
- :end-before: [END create_secret_client]
- :language: python
- :caption: Create a new ``SecretClient``
- :dedent: 4
- """
-
- # pylint:disable=protected-access
-
- @distributed_trace_async
- async def get_secret(self, name: str, version: Optional[str] = None, **kwargs: Any) -> KeyVaultSecret:
- """Get a secret. Requires the secrets/get permission.
-
- :param str name: The name of the secret
- :param str version: (optional) Version of the secret to get. If unspecified, gets the latest version.
-
- :returns: The fetched secret.
- :rtype: ~azure.keyvault.secrets.KeyVaultSecret
-
- :raises ~azure.core.exceptions.ResourceNotFoundError or ~azure.core.exceptions.HttpResponseError:
- the former if the secret doesn't exist; the latter for other errors
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets_async.py
- :start-after: [START get_secret]
- :end-before: [END get_secret]
- :language: python
- :caption: Get a secret
- :dedent: 8
- """
- bundle = await self._client.get_secret(name, version or "", **kwargs)
- return KeyVaultSecret._from_secret_bundle(bundle)
-
- @distributed_trace_async
- async def set_secret(
- self,
- name: str,
- value: str,
- *,
- enabled: Optional[bool] = None,
- tags: Optional[Dict[str, str]] = None,
- content_type: Optional[str] = None,
- not_before: Optional[datetime] = None,
- expires_on: Optional[datetime] = None,
- **kwargs: Any,
- ) -> KeyVaultSecret:
- """Set a secret value. If `name` is in use, create a new version of the secret. If not, create a new secret.
-
- Requires secrets/set permission.
-
- :param str name: The name of the secret
- :param str value: The value of the secret
-
- :keyword bool enabled: Whether the secret is enabled for use.
- :keyword tags: Application specific metadata in the form of key-value pairs.
- :paramtype tags: Dict[str, str] or None
- :keyword str content_type: An arbitrary string indicating the type of the secret, e.g. 'password'
- :keyword ~datetime.datetime not_before: Not before date of the secret in UTC
- :keyword ~datetime.datetime expires_on: Expiry date of the secret in UTC
-
- :returns: The created or updated secret.
- :rtype: ~azure.keyvault.secrets.KeyVaultSecret
-
- :raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets_async.py
- :start-after: [START set_secret]
- :end-before: [END set_secret]
- :language: python
- :caption: Set a secret's value
- :dedent: 8
- """
- if enabled is not None or not_before is not None or expires_on is not None:
- attributes = self._models.SecretAttributes(enabled=enabled, not_before=not_before, expires=expires_on)
- else:
- attributes = None
-
- parameters = self._models.SecretSetParameters(
- value=value,
- tags=tags,
- content_type=content_type,
- secret_attributes=attributes
- )
-
- bundle = await self._client.set_secret(
- name,
- parameters=parameters,
- **kwargs
- )
- return KeyVaultSecret._from_secret_bundle(bundle)
-
- @distributed_trace_async
- async def update_secret_properties(
- self,
- name: str,
- version: Optional[str] = None,
- *,
- enabled: Optional[bool] = None,
- tags: Optional[Dict[str, str]] = None,
- content_type: Optional[str] = None,
- not_before: Optional[datetime] = None,
- expires_on: Optional[datetime] = None,
- **kwargs: Any,
- ) -> SecretProperties:
- """Update properties of a secret other than its value. Requires secrets/set permission.
-
- This method updates properties of the secret, such as whether it's enabled, but can't change the secret's
- value. Use :func:`set_secret` to change the secret's value.
-
- :param str name: Name of the secret
- :param str version: (optional) Version of the secret to update. If unspecified, the latest version is updated.
-
- :keyword bool enabled: Whether the secret is enabled for use.
- :keyword tags: Application specific metadata in the form of key-value pairs.
- :paramtype tags: Dict[str, str] or None
- :keyword str content_type: An arbitrary string indicating the type of the secret, e.g. 'password'
- :keyword ~datetime.datetime not_before: Not before date of the secret in UTC
- :keyword ~datetime.datetime expires_on: Expiry date of the secret in UTC
-
- :returns: The updated secret properties.
- :rtype: ~azure.keyvault.secrets.SecretProperties
-
- :raises ~azure.core.exceptions.ResourceNotFoundError or ~azure.core.exceptions.HttpResponseError:
- the former if the secret doesn't exist; the latter for other errors
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets_async.py
- :start-after: [START update_secret]
- :end-before: [END update_secret]
- :language: python
- :caption: Updates a secret's attributes
- :dedent: 8
- """
- if enabled is not None or not_before is not None or expires_on is not None:
- attributes = self._models.SecretAttributes(enabled=enabled, not_before=not_before, expires=expires_on)
- else:
- attributes = None
-
- parameters = self._models.SecretUpdateParameters(
- content_type=content_type,
- secret_attributes=attributes,
- tags=tags,
- )
-
- bundle = await self._client.update_secret(
- name,
- secret_version=version or "",
- parameters=parameters,
- **kwargs
- )
- return SecretProperties._from_secret_bundle(bundle) # pylint: disable=protected-access
-
- @distributed_trace
- def list_properties_of_secrets(self, **kwargs: Any) -> AsyncItemPaged[SecretProperties]:
- """List identifiers and attributes of all secrets in the vault. Requires secrets/list permission.
-
- List items don't include secret values. Use :func:`get_secret` to get a secret's value.
-
- :returns: An iterator of secrets
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.secrets.SecretProperties]
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets_async.py
- :start-after: [START list_secrets]
- :end-before: [END list_secrets]
- :language: python
- :caption: Lists all secrets
- :dedent: 8
- """
- return self._client.get_secrets(
- maxresults=kwargs.pop("max_page_size", None),
- cls=lambda objs: [SecretProperties._from_secret_item(x) for x in objs],
- **kwargs
- )
-
- @distributed_trace
- def list_properties_of_secret_versions(self, name: str, **kwargs: Any) -> AsyncItemPaged[SecretProperties]:
- """List properties of all versions of a secret, excluding their values. Requires secrets/list permission.
-
- List items don't include secret values. Use :func:`get_secret` to get a secret's value.
-
- :param str name: Name of the secret
-
- :returns: An iterator of secrets, excluding their values
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.secrets.SecretProperties]
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets_async.py
- :start-after: [START list_properties_of_secret_versions]
- :end-before: [END list_properties_of_secret_versions]
- :language: python
- :caption: List all versions of a secret
- :dedent: 8
- """
- return self._client.get_secret_versions(
- name,
- maxresults=kwargs.pop("max_page_size", None),
- cls=lambda objs: [SecretProperties._from_secret_item(x) for x in objs],
- **kwargs
- )
-
- @distributed_trace_async
- async def backup_secret(self, name: str, **kwargs: Any) -> bytes:
- """Back up a secret in a protected form useable only by Azure Key Vault. Requires secrets/backup permission.
-
- :param str name: Name of the secret to back up
-
- :returns: The backup result, in a protected bytes format that can only be used by Azure Key Vault.
- :rtype: bytes
-
- :raises ~azure.core.exceptions.ResourceNotFoundError or ~azure.core.exceptions.HttpResponseError:
- the former if the secret doesn't exist; the latter for other errors
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets_async.py
- :start-after: [START backup_secret]
- :end-before: [END backup_secret]
- :language: python
- :caption: Back up a secret
- :dedent: 8
- """
- backup_result = await self._client.backup_secret(name, **kwargs)
- return cast(bytes, backup_result.value)
-
- @distributed_trace_async
- async def restore_secret_backup(self, backup: bytes, **kwargs: Any) -> SecretProperties:
- """Restore a backed up secret. Requires the secrets/restore permission.
-
- :param bytes backup: A secret backup as returned by :func:`backup_secret`
-
- :returns: The restored secret
- :rtype: ~azure.keyvault.secrets.SecretProperties
-
- :raises ~azure.core.exceptions.ResourceExistsError or ~azure.core.exceptions.HttpResponseError:
- the former if the secret's name is already in use; the latter for other errors
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets_async.py
- :start-after: [START restore_secret_backup]
- :end-before: [END restore_secret_backup]
- :language: python
- :caption: Restore a backed up secret
- :dedent: 8
- """
- bundle = await self._client.restore_secret(
- parameters=self._models.SecretRestoreParameters(secret_bundle_backup=backup),
- **kwargs
- )
- return SecretProperties._from_secret_bundle(bundle)
-
- @distributed_trace_async
- async def delete_secret(self, name: str, **kwargs: Any) -> DeletedSecret:
- """Delete all versions of a secret. Requires secrets/delete permission.
-
- If the vault has soft-delete enabled, deletion may take several seconds to complete.
-
- :param str name: Name of the secret to delete.
-
- :returns: The deleted secret.
- :rtype: ~azure.keyvault.secrets.DeletedSecret
-
- :raises ~azure.core.exceptions.ResourceNotFoundError or ~azure.core.exceptions.HttpResponseError:
- the former if the secret doesn't exist; the latter for other errors
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets_async.py
- :start-after: [START delete_secret]
- :end-before: [END delete_secret]
- :language: python
- :caption: Delete a secret
- :dedent: 8
- """
- polling_interval = kwargs.pop("_polling_interval", None)
- if polling_interval is None:
- polling_interval = 2
- # Ignore pyright warning about return type not being iterable because we use `cls` to return a tuple
- pipeline_response, deleted_secret_bundle = await self._client.delete_secret(
- secret_name=name,
- cls=lambda pipeline_response, deserialized, _: (pipeline_response, deserialized),
- **kwargs,
- ) # pyright: ignore[reportGeneralTypeIssues]
- deleted_secret = DeletedSecret._from_deleted_secret_bundle(deleted_secret_bundle)
-
- polling_method = AsyncDeleteRecoverPollingMethod(
- # no recovery ID means soft-delete is disabled, in which case we initialize the poller as finished
- pipeline_response=pipeline_response,
- command=partial(self.get_deleted_secret, name=name, **kwargs),
- final_resource=deleted_secret,
- finished=deleted_secret.recovery_id is None,
- interval=polling_interval,
- )
- await polling_method.run()
-
- return polling_method.resource()
-
- @distributed_trace_async
- async def get_deleted_secret(self, name: str, **kwargs: Any) -> DeletedSecret:
- """Get a deleted secret. Possible only in vaults with soft-delete enabled. Requires secrets/get permission.
-
- :param str name: Name of the deleted secret
-
- :returns: The deleted secret.
- :rtype: ~azure.keyvault.secrets.DeletedSecret
-
- :raises ~azure.core.exceptions.ResourceNotFoundError or ~azure.core.exceptions.HttpResponseError:
- the former if the deleted secret doesn't exist; the latter for other errors
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets_async.py
- :start-after: [START get_deleted_secret]
- :end-before: [END get_deleted_secret]
- :language: python
- :caption: Get a deleted secret
- :dedent: 8
- """
- bundle = await self._client.get_deleted_secret(name, **kwargs)
- return DeletedSecret._from_deleted_secret_bundle(bundle)
-
- @distributed_trace
- def list_deleted_secrets(self, **kwargs: Any) -> AsyncItemPaged[DeletedSecret]:
- """Lists all deleted secrets. Possible only in vaults with soft-delete enabled.
-
- Requires secrets/list permission.
-
- :returns: An iterator of deleted secrets, excluding their values
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.secrets.DeletedSecret]
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets_async.py
- :start-after: [START list_deleted_secrets]
- :end-before: [END list_deleted_secrets]
- :language: python
- :caption: Lists deleted secrets
- :dedent: 8
- """
- return self._client.get_deleted_secrets(
- maxresults=kwargs.pop("max_page_size", None),
- cls=lambda objs: [DeletedSecret._from_deleted_secret_item(x) for x in objs],
- **kwargs
- )
-
- @distributed_trace_async
- async def purge_deleted_secret(self, name: str, **kwargs: Any) -> None:
- """Permanently delete a deleted secret. Possible only in vaults with soft-delete enabled.
-
- Performs an irreversible deletion of the specified secret, without possibility for recovery. The operation is
- not available if the :py:attr:`~azure.keyvault.secrets.SecretProperties.recovery_level` does not specify
- 'Purgeable'. This method is only necessary for purging a secret before its
- :py:attr:`~azure.keyvault.secrets.DeletedSecret.scheduled_purge_date`.
-
- Requires secrets/purge permission.
-
- :param str name: Name of the deleted secret to purge
-
- :returns: None
-
- :raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # if the vault has soft-delete enabled, purge permanently deletes the secret
- # (with soft-delete disabled, delete_secret is permanent)
- await secret_client.purge_deleted_secret("secret-name")
-
- """
- await self._client.purge_deleted_secret(name, **kwargs)
-
- @distributed_trace_async
- async def recover_deleted_secret(self, name: str, **kwargs: Any) -> SecretProperties:
- """Recover a deleted secret to its latest version. This is possible only in vaults with soft-delete enabled.
-
- Requires the secrets/recover permission. If the vault does not have soft-delete enabled, :func:`delete_secret`
- is permanent, and this method will raise an error. Attempting to recover a non-deleted secret will also raise an
- error.
-
- :param str name: Name of the deleted secret to recover
-
- :returns: The recovered secret's properties.
- :rtype: ~azure.keyvault.secrets.SecretProperties
-
- :raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. literalinclude:: ../tests/test_samples_secrets_async.py
- :start-after: [START recover_deleted_secret]
- :end-before: [END recover_deleted_secret]
- :language: python
- :caption: Recover a deleted secret
- :dedent: 8
- """
- polling_interval = kwargs.pop("_polling_interval", None)
- if polling_interval is None:
- polling_interval = 2
- # Ignore pyright warning about return type not being iterable because we use `cls` to return a tuple
- pipeline_response, recovered_secret_bundle = await self._client.recover_deleted_secret(
- secret_name=name,
- cls=lambda pipeline_response, deserialized, _: (pipeline_response, deserialized),
- **kwargs,
- ) # pyright: ignore[reportGeneralTypeIssues]
- recovered_secret = SecretProperties._from_secret_bundle(recovered_secret_bundle)
-
- command = partial(self.get_secret, name=name, **kwargs)
- polling_method = AsyncDeleteRecoverPollingMethod(
- pipeline_response=pipeline_response,
- command=command,
- final_resource=recovered_secret,
- finished=False,
- interval=polling_interval
- )
- await polling_method.run()
-
- return polling_method.resource()
-
- async def __aenter__(self) -> "SecretClient":
- await self._client.__aenter__()
- return self
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/py.typed b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/py.typed
deleted file mode 100644
index e69de29bb2d1..000000000000
diff --git a/sdk/keyvault/azure-keyvault-secrets/pyproject.toml b/sdk/keyvault/azure-keyvault-secrets/pyproject.toml
index b66730034d2c..2451cb712228 100644
--- a/sdk/keyvault/azure-keyvault-secrets/pyproject.toml
+++ b/sdk/keyvault/azure-keyvault-secrets/pyproject.toml
@@ -3,3 +3,6 @@
[tool.azure-sdk-conda]
in_bundle = true
bundle_name = "azure-keyvault"
+
+[packaging]
+auto_update = false
diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations.py b/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations.py
index 6cf17e27fb66..e03c2f3cace0 100644
--- a/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations.py
+++ b/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations_async.py b/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations_async.py
index 3412407cee50..61ecc9baf868 100644
--- a/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations_async.py
+++ b/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations_async.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
@@ -8,6 +9,7 @@
from azure.keyvault.secrets.aio import SecretClient
from azure.identity.aio import DefaultAzureCredential
+
# ----------------------------------------------------------------------------------------------------------
# Prerequisites:
# 1. An Azure Key Vault (https://learn.microsoft.com/azure/key-vault/quick-create-cli)
@@ -74,4 +76,4 @@ async def run_sample():
if __name__ == "__main__":
- asyncio.run(run_sample())
\ No newline at end of file
+ asyncio.run(run_sample())
diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/hello_world.py b/sdk/keyvault/azure-keyvault-secrets/samples/hello_world.py
index f9c4bc8f96fe..3a7748229984 100644
--- a/sdk/keyvault/azure-keyvault-secrets/samples/hello_world.py
+++ b/sdk/keyvault/azure-keyvault-secrets/samples/hello_world.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/hello_world_async.py b/sdk/keyvault/azure-keyvault-secrets/samples/hello_world_async.py
index ff1e1de91d97..b42e93ce1234 100644
--- a/sdk/keyvault/azure-keyvault-secrets/samples/hello_world_async.py
+++ b/sdk/keyvault/azure-keyvault-secrets/samples/hello_world_async.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
@@ -9,6 +10,7 @@
from azure.keyvault.secrets.aio import SecretClient
from azure.identity.aio import DefaultAzureCredential
+
# ----------------------------------------------------------------------------------------------------------
# Prerequisites:
# 1. An Azure Key Vault (https://learn.microsoft.com/azure/key-vault/quick-create-cli)
diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/list_operations.py b/sdk/keyvault/azure-keyvault-secrets/samples/list_operations.py
index bf31a8a86fc5..207d938c0720 100644
--- a/sdk/keyvault/azure-keyvault-secrets/samples/list_operations.py
+++ b/sdk/keyvault/azure-keyvault-secrets/samples/list_operations.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
@@ -58,17 +59,13 @@
for secret in secrets:
assert secret.name
retrieved_secret = client.get_secret(secret.name)
- print(
- f"Secret with name '{retrieved_secret.name}' and value {retrieved_secret.name} was found."
- )
+    print(f"Secret with name '{retrieved_secret.name}' and value '{retrieved_secret.value}' was found.")
# The bank account password got updated, so you want to update the secret in Key Vault to ensure it reflects the
# new password. Calling set_secret on an existing secret creates a new version of the secret in the Key Vault
# with the new value.
updated_secret = client.set_secret(bank_secret.name, "newSecretValue")
-print(
- f"Secret with name '{updated_secret.name}' was updated with new value '{updated_secret.value}'"
-)
+print(f"Secret with name '{updated_secret.name}' was updated with new value '{updated_secret.value}'")
# You need to check all the different values your bank account password secret had previously. Lets print all
# the versions of this secret.
@@ -89,6 +86,4 @@
print("\n.. List deleted secrets from the Key Vault")
deleted_secrets = client.list_deleted_secrets()
for deleted_secret in deleted_secrets:
- print(
- f"Secret with name '{deleted_secret.name}' has recovery id '{deleted_secret.recovery_id}'"
- )
+ print(f"Secret with name '{deleted_secret.name}' has recovery id '{deleted_secret.recovery_id}'")
diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/list_operations_async.py b/sdk/keyvault/azure-keyvault-secrets/samples/list_operations_async.py
index 7a591519b0a0..6c02546e7e58 100644
--- a/sdk/keyvault/azure-keyvault-secrets/samples/list_operations_async.py
+++ b/sdk/keyvault/azure-keyvault-secrets/samples/list_operations_async.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
@@ -8,6 +9,7 @@
from azure.keyvault.secrets.aio import SecretClient
from azure.identity.aio import DefaultAzureCredential
+
# ----------------------------------------------------------------------------------------------------------
# Prerequisites:
# 1. An Azure Key Vault (https://learn.microsoft.com/azure/key-vault/quick-create-cli)
@@ -64,9 +66,7 @@ async def run_sample():
# new password. Calling set_secret on an existing secret creates a new version of the secret in the Key Vault
# with the new value.
updated_secret = await client.set_secret(bank_secret.name, "newSecretValue")
- print(
- f"Secret with name '{updated_secret.name}' was updated with new value '{updated_secret.value}'"
- )
+ print(f"Secret with name '{updated_secret.name}' was updated with new value '{updated_secret.value}'")
# You need to check all the different values your bank account password secret had previously. Lets print all
# the versions of this secret.
@@ -84,9 +84,7 @@ async def run_sample():
print("\n.. List deleted secrets from the Key Vault")
deleted_secrets = client.list_deleted_secrets()
async for deleted_secret in deleted_secrets:
- print(
- f"Secret with name '{deleted_secret.name}' has recovery id '{deleted_secret.recovery_id}'"
- )
+ print(f"Secret with name '{deleted_secret.name}' has recovery id '{deleted_secret.recovery_id}'")
print("\nrun_sample done")
await credential.close()
diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations.py b/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations.py
index c22aac38188e..13d810d778bb 100644
--- a/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations.py
+++ b/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations_async.py b/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations_async.py
index b69b0bcfb4ef..60e690ec9939 100644
--- a/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations_async.py
+++ b/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations_async.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
@@ -8,6 +9,7 @@
from azure.keyvault.secrets.aio import SecretClient
from azure.identity.aio import DefaultAzureCredential
+
# ----------------------------------------------------------------------------------------------------------
# Prerequisites:
# 1. An Azure Key Vault (https://learn.microsoft.com/azure/key-vault/quick-create-cli)
diff --git a/sdk/keyvault/azure-keyvault-secrets/sdk_packaging.toml b/sdk/keyvault/azure-keyvault-secrets/sdk_packaging.toml
deleted file mode 100644
index e7687fdae93b..000000000000
--- a/sdk/keyvault/azure-keyvault-secrets/sdk_packaging.toml
+++ /dev/null
@@ -1,2 +0,0 @@
-[packaging]
-auto_update = false
\ No newline at end of file
diff --git a/sdk/keyvault/azure-keyvault-secrets/setup.py b/sdk/keyvault/azure-keyvault-secrets/setup.py
index 62f8b4627517..25c0b0a9a6d3 100644
--- a/sdk/keyvault/azure-keyvault-secrets/setup.py
+++ b/sdk/keyvault/azure-keyvault-secrets/setup.py
@@ -14,9 +14,10 @@
PACKAGE_NAME = "azure-keyvault-secrets"
PACKAGE_PPRINT_NAME = "Key Vault Secrets"
+PACKAGE_NAMESPACE = "azure.keyvault.secrets._generated"
-# a-b-c => a/b/c
-package_folder_path = PACKAGE_NAME.replace("-", "/")
+# a.b.c => a/b/c
+package_folder_path = PACKAGE_NAMESPACE.replace(".", "/")
# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, "_version.py"), "r") as fd:
@@ -29,7 +30,6 @@
setup(
name=PACKAGE_NAME,
version=version,
- include_package_data=True,
description="Microsoft Corporation {} Client Library for Python".format(PACKAGE_PPRINT_NAME),
long_description=open("README.md", "r").read(),
long_description_content_type="text/markdown",
@@ -53,16 +53,20 @@
zip_safe=False,
packages=find_packages(
exclude=[
- "samples",
"tests",
# Exclude packages that will be covered by PEP420 or nspkg
"azure",
"azure.keyvault",
+ "azure.keyvault.secrets",
]
),
+ include_package_data=True,
+ package_data={
+ "azure.keyvault.secrets._generated": ["py.typed"],
+ },
install_requires=[
"isodate>=0.6.1",
- "azure-core>=1.31.0",
+ "azure-core>=1.37.0",
"typing-extensions>=4.6.0",
],
python_requires=">=3.9",
diff --git a/sdk/keyvault/azure-keyvault-secrets/tests/conftest.py b/sdk/keyvault/azure-keyvault-secrets/tests/conftest.py
index 96ff292c3f2c..e630f5c4ab81 100644
--- a/sdk/keyvault/azure-keyvault-secrets/tests/conftest.py
+++ b/sdk/keyvault/azure-keyvault-secrets/tests/conftest.py
@@ -31,7 +31,7 @@
test_proxy,
add_oauth_response_sanitizer,
add_general_regex_sanitizer,
- remove_batch_sanitizers
+ remove_batch_sanitizers,
)
diff --git a/sdk/keyvault/azure-keyvault-secrets/tests/test_polling_method.py b/sdk/keyvault/azure-keyvault-secrets/tests/test_polling_method.py
index fded987db56c..22189629419d 100644
--- a/sdk/keyvault/azure-keyvault-secrets/tests/test_polling_method.py
+++ b/sdk/keyvault/azure-keyvault-secrets/tests/test_polling_method.py
@@ -74,7 +74,9 @@ def command():
_command.operation_complete = True
resource = object()
- polling_method = DeleteRecoverPollingMethod(mock_pipeline_response, command, final_resource=resource, finished=False)
+ polling_method = DeleteRecoverPollingMethod(
+ mock_pipeline_response, command, final_resource=resource, finished=False
+ )
assert not polling_method.finished()
with mock.patch(SLEEP) as sleep:
@@ -102,7 +104,9 @@ def test_final_resource():
assert final_resource is resource
command = mock.Mock()
- polling_method = DeleteRecoverPollingMethod(mock_pipeline_response, command, final_resource=resource, finished=False)
+ polling_method = DeleteRecoverPollingMethod(
+ mock_pipeline_response, command, final_resource=resource, finished=False
+ )
assert polling_method.resource() is resource
polling_method.run()
diff --git a/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_async.py b/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_async.py
index 435d6c6d04b1..8408e6e35890 100644
--- a/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_async.py
+++ b/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_async.py
@@ -390,6 +390,7 @@ async def test_40x_handling(self, client, **kwargs):
# Test that 409 is raised correctly (`set_secret` shouldn't actually trigger this, but for raising behavior)
async def run(*_, **__):
return Mock(http_response=Mock(status_code=409))
+
with patch.object(client._client._client._pipeline, "run", run):
with pytest.raises(ResourceExistsError):
await client.set_secret("...", "...")
diff --git a/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_client.py b/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_client.py
index ed09c2759c48..d92537cc0f19 100644
--- a/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_client.py
+++ b/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_client.py
@@ -379,6 +379,7 @@ def test_40x_handling(self, client, **kwargs):
# Test that 409 is raised correctly (`set_secret` shouldn't actually trigger this, but for raising behavior)
def run(*_, **__):
return Mock(http_response=Mock(status_code=409))
+
with patch.object(client._client._client._pipeline, "run", run):
with pytest.raises(ResourceExistsError):
client.set_secret("...", "...")
diff --git a/sdk/keyvault/azure-keyvault-secrets/tsp-location.yaml b/sdk/keyvault/azure-keyvault-secrets/tsp-location.yaml
index 62991cf40a7e..b89bc237822b 100644
--- a/sdk/keyvault/azure-keyvault-secrets/tsp-location.yaml
+++ b/sdk/keyvault/azure-keyvault-secrets/tsp-location.yaml
@@ -1,5 +1,5 @@
directory: specification/keyvault/Security.KeyVault.Secrets
-commit: 9015889a4cae078355b642b3c66d0f18ce41e075
+commit: 74cc90c49189a079b3cc93fde9c9ad76742f0184
repo: Azure/azure-rest-api-specs
-additionalDirectories:
-- specification/keyvault/Security.KeyVault.Common/
+additionalDirectories:
+- specification/keyvault/Security.KeyVault.Common