From 39dc4d29a57a7fa4442a1b412772aba160e42260 Mon Sep 17 00:00:00 2001 From: Nicola Camillucci Date: Wed, 8 Apr 2026 15:52:34 +0100 Subject: [PATCH 01/11] Updated TypeSpec files --- .../azure-keyvault-administration/tsp-location.yaml | 9 ++------- sdk/keyvault/azure-keyvault-keys/tsp-location.yaml | 6 ++---- 2 files changed, 4 insertions(+), 11 deletions(-) diff --git a/sdk/keyvault/azure-keyvault-administration/tsp-location.yaml b/sdk/keyvault/azure-keyvault-administration/tsp-location.yaml index 99be9b2659a0..26fe3ba24c63 100644 --- a/sdk/keyvault/azure-keyvault-administration/tsp-location.yaml +++ b/sdk/keyvault/azure-keyvault-administration/tsp-location.yaml @@ -1,8 +1,3 @@ -directory: specification/keyvault/Security.KeyVault.Administration -commit: 9015889a4cae078355b642b3c66d0f18ce41e075 +directory: specification/keyvault/data-plane/Administration +commit: f6bd06be22baf3a18504ffef0f590230850953e5 repo: Azure/azure-rest-api-specs -additionalDirectories: -- specification/keyvault/Security.KeyVault.BackupRestore/ -- specification/keyvault/Security.KeyVault.Common/ -- specification/keyvault/Security.KeyVault.RBAC/ -- specification/keyvault/Security.KeyVault.Settings/ diff --git a/sdk/keyvault/azure-keyvault-keys/tsp-location.yaml b/sdk/keyvault/azure-keyvault-keys/tsp-location.yaml index 34a34af2d863..d837ec69a67b 100644 --- a/sdk/keyvault/azure-keyvault-keys/tsp-location.yaml +++ b/sdk/keyvault/azure-keyvault-keys/tsp-location.yaml @@ -1,5 +1,3 @@ -directory: specification/keyvault/Security.KeyVault.Keys -commit: 9015889a4cae078355b642b3c66d0f18ce41e075 +directory: specification/keyvault/data-plane/Keys +commit: f6bd06be22baf3a18504ffef0f590230850953e5 repo: Azure/azure-rest-api-specs -additionalDirectories: -- specification/keyvault/Security.KeyVault.Common/ From b3ad7beb084bfca9297dea4f59dca9366994ceac Mon Sep 17 00:00:00 2001 From: Nicola Camillucci Date: Thu, 16 Apr 2026 17:32:38 +0100 Subject: [PATCH 02/11] Regenerated Keys SDK --- 
.../azure-keyvault-keys/_metadata.json | 6 + .../apiview-properties.json | 92 ++++ .../azure-keyvault-keys/azure/__init__.py | 4 - .../azure/keyvault/__init__.py | 4 - .../azure/keyvault/keys/_client.py | 34 +- .../azure/keyvault/keys/_generated/_client.py | 9 +- .../keys/_generated/_configuration.py | 7 +- .../keys/_generated/_operations/__init__.py | 6 +- .../_generated/_operations/_operations.py | 284 ++++++++---- .../keys/_generated/_utils/model_base.py | 421 +++++++++++++----- .../keys/_generated/_utils/serialization.py | 43 +- .../keyvault/keys/_generated/_validation.py | 20 +- .../keyvault/keys/_generated/_version.py | 2 +- .../keyvault/keys/_generated/aio/_client.py | 9 +- .../keys/_generated/aio/_configuration.py | 7 +- .../_generated/aio/_operations/__init__.py | 6 +- .../_generated/aio/_operations/_operations.py | 226 +++++++--- .../keyvault/keys/_generated/models/_enums.py | 38 +- .../keys/_generated/models/_models.py | 46 +- .../azure/keyvault/keys/_models.py | 8 +- .../_shared/async_challenge_auth_policy.py | 10 +- .../azure/keyvault/keys/aio/_client.py | 21 +- .../azure/keyvault/keys/crypto/_client.py | 36 +- .../azure/keyvault/keys/crypto/_enums.py | 1 + .../keyvault/keys/crypto/_internal/rsa_key.py | 9 +- .../azure/keyvault/keys/crypto/_models.py | 1 + .../azure/keyvault/keys/crypto/aio/_client.py | 14 +- .../samples/backup_restore_operations.py | 3 +- .../backup_restore_operations_async.py | 4 +- .../samples/hello_world.py | 7 +- .../samples/hello_world_async.py | 2 + .../samples/key_rotation.py | 3 +- .../samples/key_rotation_async.py | 4 +- .../samples/list_operations.py | 3 +- .../samples/list_operations_async.py | 2 + .../samples/recover_purge_operations.py | 3 +- .../samples/recover_purge_operations_async.py | 2 + .../samples/send_request.py | 5 +- .../tests/_keys_test_case.py | 6 +- .../tests/_shared/test_case.py | 2 - .../tests/_shared/test_case_async.py | 2 +- .../azure-keyvault-keys/tests/conftest.py | 31 +- 
.../azure-keyvault-keys/tests/keys.py | 1 + .../tests/perfstress_tests/sign.py | 1 + .../tests/test_challenge_auth.py | 18 +- .../tests/test_challenge_auth_async.py | 24 +- .../tests/test_crypto_client.py | 87 ++-- .../tests/test_crypto_client_async.py | 76 ++-- .../tests/test_examples_crypto.py | 5 +- .../tests/test_examples_crypto_async.py | 10 +- .../tests/test_key_client.py | 106 +++-- .../tests/test_keys_async.py | 124 +++--- .../tests/test_local_crypto.py | 16 +- .../tests/test_parse_id.py | 2 +- .../tests/test_samples_keys.py | 12 +- .../tests/test_samples_keys_async.py | 12 +- 56 files changed, 1246 insertions(+), 691 deletions(-) create mode 100644 sdk/keyvault/azure-keyvault-keys/_metadata.json create mode 100644 sdk/keyvault/azure-keyvault-keys/apiview-properties.json diff --git a/sdk/keyvault/azure-keyvault-keys/_metadata.json b/sdk/keyvault/azure-keyvault-keys/_metadata.json new file mode 100644 index 000000000000..0a2924fbf51d --- /dev/null +++ b/sdk/keyvault/azure-keyvault-keys/_metadata.json @@ -0,0 +1,6 @@ +{ + "apiVersion": "2025-07-01", + "apiVersions": { + "KeyVault": "2025-07-01" + } +} \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-keys/apiview-properties.json b/sdk/keyvault/azure-keyvault-keys/apiview-properties.json new file mode 100644 index 000000000000..2fbacff777a3 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-keys/apiview-properties.json @@ -0,0 +1,92 @@ +{ + "CrossLanguagePackageId": "KeyVault", + "CrossLanguageDefinitionId": { + "azure.keyvault.keys._generated.models.BackupKeyResult": "KeyVault.BackupKeyResult", + "azure.keyvault.keys._generated.models.DeletedKeyBundle": "KeyVault.DeletedKeyBundle", + "azure.keyvault.keys._generated.models.DeletedKeyItem": "KeyVault.DeletedKeyItem", + "azure.keyvault.keys._generated.models.GetRandomBytesRequest": "KeyVault.GetRandomBytesRequest", + "azure.keyvault.keys._generated.models.JsonWebKey": "KeyVault.JsonWebKey", + 
"azure.keyvault.keys._generated.models.KeyAttestation": "KeyVault.KeyAttestation", + "azure.keyvault.keys._generated.models.KeyAttributes": "KeyVault.KeyAttributes", + "azure.keyvault.keys._generated.models.KeyBundle": "KeyVault.KeyBundle", + "azure.keyvault.keys._generated.models.KeyCreateParameters": "KeyVault.KeyCreateParameters", + "azure.keyvault.keys._generated.models.KeyImportParameters": "KeyVault.KeyImportParameters", + "azure.keyvault.keys._generated.models.KeyItem": "KeyVault.KeyItem", + "azure.keyvault.keys._generated.models.KeyOperationResult": "KeyVault.KeyOperationResult", + "azure.keyvault.keys._generated.models.KeyOperationsParameters": "KeyVault.KeyOperationsParameters", + "azure.keyvault.keys._generated.models.KeyReleaseParameters": "KeyVault.KeyReleaseParameters", + "azure.keyvault.keys._generated.models.KeyReleasePolicy": "KeyVault.KeyReleasePolicy", + "azure.keyvault.keys._generated.models.KeyReleaseResult": "KeyVault.KeyReleaseResult", + "azure.keyvault.keys._generated.models.KeyRestoreParameters": "KeyVault.KeyRestoreParameters", + "azure.keyvault.keys._generated.models.KeyRotationPolicy": "KeyVault.KeyRotationPolicy", + "azure.keyvault.keys._generated.models.KeyRotationPolicyAttributes": "KeyVault.KeyRotationPolicyAttributes", + "azure.keyvault.keys._generated.models.KeySignParameters": "KeyVault.KeySignParameters", + "azure.keyvault.keys._generated.models.KeyUpdateParameters": "KeyVault.KeyUpdateParameters", + "azure.keyvault.keys._generated.models.KeyVaultError": "KeyVaultError", + "azure.keyvault.keys._generated.models.KeyVaultErrorError": "KeyVaultError.error.anonymous", + "azure.keyvault.keys._generated.models.KeyVerifyParameters": "KeyVault.KeyVerifyParameters", + "azure.keyvault.keys._generated.models.KeyVerifyResult": "KeyVault.KeyVerifyResult", + "azure.keyvault.keys._generated.models.LifetimeActions": "KeyVault.LifetimeActions", + "azure.keyvault.keys._generated.models.LifetimeActionsTrigger": "KeyVault.LifetimeActionsTrigger", + 
"azure.keyvault.keys._generated.models.LifetimeActionsType": "KeyVault.LifetimeActionsType", + "azure.keyvault.keys._generated.models.RandomBytes": "KeyVault.RandomBytes", + "azure.keyvault.keys._generated.models.JsonWebKeyType": "KeyVault.JsonWebKeyType", + "azure.keyvault.keys._generated.models.JsonWebKeyCurveName": "KeyVault.JsonWebKeyCurveName", + "azure.keyvault.keys._generated.models.DeletionRecoveryLevel": "KeyVault.DeletionRecoveryLevel", + "azure.keyvault.keys._generated.models.JsonWebKeyOperation": "KeyVault.JsonWebKeyOperation", + "azure.keyvault.keys._generated.models.JsonWebKeyEncryptionAlgorithm": "KeyVault.JsonWebKeyEncryptionAlgorithm", + "azure.keyvault.keys._generated.models.JsonWebKeySignatureAlgorithm": "KeyVault.JsonWebKeySignatureAlgorithm", + "azure.keyvault.keys._generated.models.KeyEncryptionAlgorithm": "KeyVault.KeyEncryptionAlgorithm", + "azure.keyvault.keys._generated.models.KeyRotationPolicyAction": "KeyVault.KeyRotationPolicyAction", + "azure.keyvault.keys._generated.KeyVaultClient.create_key": "KeyVault.createKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.create_key": "KeyVault.createKey", + "azure.keyvault.keys._generated.KeyVaultClient.rotate_key": "KeyVault.rotateKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.rotate_key": "KeyVault.rotateKey", + "azure.keyvault.keys._generated.KeyVaultClient.import_key": "KeyVault.importKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.import_key": "KeyVault.importKey", + "azure.keyvault.keys._generated.KeyVaultClient.delete_key": "KeyVault.deleteKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.delete_key": "KeyVault.deleteKey", + "azure.keyvault.keys._generated.KeyVaultClient.update_key": "KeyVault.updateKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.update_key": "KeyVault.updateKey", + "azure.keyvault.keys._generated.KeyVaultClient.get_key": "KeyVault.getKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_key": "KeyVault.getKey", + 
"azure.keyvault.keys._generated.KeyVaultClient.get_key_versions": "KeyVault.getKeyVersions", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_key_versions": "KeyVault.getKeyVersions", + "azure.keyvault.keys._generated.KeyVaultClient.get_keys": "KeyVault.getKeys", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_keys": "KeyVault.getKeys", + "azure.keyvault.keys._generated.KeyVaultClient.backup_key": "KeyVault.backupKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.backup_key": "KeyVault.backupKey", + "azure.keyvault.keys._generated.KeyVaultClient.restore_key": "KeyVault.restoreKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.restore_key": "KeyVault.restoreKey", + "azure.keyvault.keys._generated.KeyVaultClient.encrypt": "KeyVault.encrypt", + "azure.keyvault.keys._generated.aio.KeyVaultClient.encrypt": "KeyVault.encrypt", + "azure.keyvault.keys._generated.KeyVaultClient.decrypt": "KeyVault.decrypt", + "azure.keyvault.keys._generated.aio.KeyVaultClient.decrypt": "KeyVault.decrypt", + "azure.keyvault.keys._generated.KeyVaultClient.sign": "KeyVault.sign", + "azure.keyvault.keys._generated.aio.KeyVaultClient.sign": "KeyVault.sign", + "azure.keyvault.keys._generated.KeyVaultClient.verify": "KeyVault.verify", + "azure.keyvault.keys._generated.aio.KeyVaultClient.verify": "KeyVault.verify", + "azure.keyvault.keys._generated.KeyVaultClient.wrap_key": "KeyVault.wrapKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.wrap_key": "KeyVault.wrapKey", + "azure.keyvault.keys._generated.KeyVaultClient.unwrap_key": "KeyVault.unwrapKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.unwrap_key": "KeyVault.unwrapKey", + "azure.keyvault.keys._generated.KeyVaultClient.release": "KeyVault.release", + "azure.keyvault.keys._generated.aio.KeyVaultClient.release": "KeyVault.release", + "azure.keyvault.keys._generated.KeyVaultClient.get_deleted_keys": "KeyVault.getDeletedKeys", + 
"azure.keyvault.keys._generated.aio.KeyVaultClient.get_deleted_keys": "KeyVault.getDeletedKeys", + "azure.keyvault.keys._generated.KeyVaultClient.get_deleted_key": "KeyVault.getDeletedKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_deleted_key": "KeyVault.getDeletedKey", + "azure.keyvault.keys._generated.KeyVaultClient.purge_deleted_key": "KeyVault.purgeDeletedKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.purge_deleted_key": "KeyVault.purgeDeletedKey", + "azure.keyvault.keys._generated.KeyVaultClient.recover_deleted_key": "KeyVault.recoverDeletedKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.recover_deleted_key": "KeyVault.recoverDeletedKey", + "azure.keyvault.keys._generated.KeyVaultClient.get_key_rotation_policy": "KeyVault.getKeyRotationPolicy", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_key_rotation_policy": "KeyVault.getKeyRotationPolicy", + "azure.keyvault.keys._generated.KeyVaultClient.update_key_rotation_policy": "KeyVault.updateKeyRotationPolicy", + "azure.keyvault.keys._generated.aio.KeyVaultClient.update_key_rotation_policy": "KeyVault.updateKeyRotationPolicy", + "azure.keyvault.keys._generated.KeyVaultClient.get_random_bytes": "KeyVault.getRandomBytes", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_random_bytes": "KeyVault.getRandomBytes", + "azure.keyvault.keys._generated.KeyVaultClient.get_key_attestation": "KeyVault.getKeyAttestation", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_key_attestation": "KeyVault.getKeyAttestation" + } +} \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-keys/azure/__init__.py b/sdk/keyvault/azure-keyvault-keys/azure/__init__.py index 679ab6995134..d55ccad1f573 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/__init__.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/__init__.py @@ -1,5 +1 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. 
-# ------------------------------------ __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/__init__.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/__init__.py index 679ab6995134..d55ccad1f573 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/__init__.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/__init__.py @@ -1,5 +1 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_client.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_client.py index 6ab95a316bd7..d54c2457648e 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_client.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_client.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -78,12 +79,12 @@ def _get_attributes( return None def get_cryptography_client( - self, - key_name: str, - *, - key_version: Optional[str] = None, - **kwargs, # pylint: disable=unused-argument - ) -> CryptographyClient: + self, + key_name: str, + *, + key_version: Optional[str] = None, + **kwargs, # pylint: disable=unused-argument + ) -> CryptographyClient: """Gets a :class:`~azure.keyvault.keys.crypto.CryptographyClient` for the given key. :param str key_name: The name of the key used to perform cryptographic operations. 
@@ -398,7 +399,9 @@ def create_oct_key( ) @distributed_trace - def begin_delete_key(self, name: str, **kwargs: Any) -> LROPoller[DeletedKey]: # pylint:disable=bad-option-value,delete-operation-wrong-return-type + def begin_delete_key( + self, name: str, **kwargs: Any + ) -> LROPoller[DeletedKey]: # pylint:disable=bad-option-value,delete-operation-wrong-return-type """Delete all versions of a key and its cryptographic material. Requires keys/delete permission. When this method returns Key Vault has begun deleting the key. Deletion may @@ -519,7 +522,7 @@ def list_deleted_keys(self, **kwargs: Any) -> ItemPaged[DeletedKey]: return self._client.get_deleted_keys( maxresults=kwargs.pop("max_page_size", None), cls=lambda objs: [DeletedKey._from_deleted_key_item(x) for x in objs], - **kwargs + **kwargs, ) @distributed_trace @@ -542,7 +545,7 @@ def list_properties_of_keys(self, **kwargs: Any) -> ItemPaged[KeyProperties]: return self._client.get_keys( maxresults=kwargs.pop("max_page_size", None), cls=lambda objs: [KeyProperties._from_key_item(x) for x in objs], - **kwargs + **kwargs, ) @distributed_trace @@ -568,7 +571,7 @@ def list_properties_of_key_versions(self, name: str, **kwargs: Any) -> ItemPaged name, maxresults=kwargs.pop("max_page_size", None), cls=lambda objs: [KeyProperties._from_key_item(x) for x in objs], - **kwargs + **kwargs, ) @distributed_trace @@ -709,9 +712,7 @@ def update_key_properties( release_policy=policy, ) - bundle = self._client.update_key( - name, key_version=version or "", parameters=parameters, **kwargs - ) + bundle = self._client.update_key(name, key_version=version or "", parameters=parameters, **kwargs) return KeyVaultKey._from_key_bundle(bundle) @distributed_trace @@ -770,8 +771,7 @@ def restore_key_backup(self, backup: bytes, **kwargs: Any) -> KeyVaultKey: :dedent: 8 """ bundle = self._client.restore_key( - parameters=self._models.KeyRestoreParameters(key_bundle_backup=backup), - **kwargs + 
parameters=self._models.KeyRestoreParameters(key_bundle_backup=backup), **kwargs ) return KeyVaultKey._from_key_bundle(bundle) @@ -877,7 +877,7 @@ def release_key( nonce=nonce, enc=algorithm, ), - **kwargs + **kwargs, ) return ReleaseKeyResult(result.value) @@ -988,7 +988,7 @@ def update_key_rotation_policy( # pylint: disable=unused-argument @distributed_trace def get_key_attestation(self, name: str, version: Optional[str] = None, **kwargs: Any) -> KeyVaultKey: """Get a key and its attestation blob. - + This method is applicable to any key stored in Azure Key Vault Managed HSM. This operation requires the keys/get permission. diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_client.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_client.py index affcf5d228d3..0ab087a35122 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_client.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_client.py @@ -15,14 +15,14 @@ from azure.core.rest import HttpRequest, HttpResponse from ._configuration import KeyVaultClientConfiguration -from ._operations import KeyVaultClientOperationsMixin +from ._operations import _KeyVaultClientOperationsMixin from ._utils.serialization import Deserializer, Serializer if TYPE_CHECKING: from azure.core.credentials import TokenCredential -class KeyVaultClient(KeyVaultClientOperationsMixin): +class KeyVaultClient(_KeyVaultClientOperationsMixin): """The key vault client performs cryptographic key operations and vault operations against the Key Vault service. @@ -30,8 +30,9 @@ class KeyVaultClient(KeyVaultClientOperationsMixin): :type vault_base_url: str :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials.TokenCredential - :keyword api_version: The API version to use for this operation. Default value is "7.6". 
Note - that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-07-01". + Default value is "2025-07-01". Note that overriding this default value may result in + unsupported behavior. :paramtype api_version: str """ diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_configuration.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_configuration.py index 8780efd9f643..ea1708489bee 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_configuration.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_configuration.py @@ -26,13 +26,14 @@ class KeyVaultClientConfiguration: # pylint: disable=too-many-instance-attribut :type vault_base_url: str :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials.TokenCredential - :keyword api_version: The API version to use for this operation. Default value is "7.6". Note - that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-07-01". + Default value is "2025-07-01". Note that overriding this default value may result in + unsupported behavior. 
:paramtype api_version: str """ def __init__(self, vault_base_url: str, credential: "TokenCredential", **kwargs: Any) -> None: - api_version: str = kwargs.pop("api_version", "7.6") + api_version: str = kwargs.pop("api_version", "2025-07-01") if vault_base_url is None: raise ValueError("Parameter 'vault_base_url' must not be None.") diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/__init__.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/__init__.py index d514f5e4b5be..79e1a2ccf3da 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/__init__.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/__init__.py @@ -12,14 +12,12 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._operations import KeyVaultClientOperationsMixin # type: ignore +from ._operations import _KeyVaultClientOperationsMixin # type: ignore # pylint: disable=unused-import from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk -__all__ = [ - "KeyVaultClientOperationsMixin", -] +__all__ = [] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/_operations.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/_operations.py index 2acc3fc6df38..de4199053d26 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/_operations.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/_operations.py @@ -9,7 +9,7 @@ from collections.abc import MutableMapping from io import IOBase import json -from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload import 
urllib.parse from azure.core import PipelineClient @@ -38,7 +38,7 @@ JSON = MutableMapping[str, Any] T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -49,7 +49,7 @@ def build_key_vault_create_key_request(key_name: str, **kwargs: Any) -> HttpRequ _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -75,7 +75,7 @@ def build_key_vault_rotate_key_request(key_name: str, **kwargs: Any) -> HttpRequ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -100,7 +100,7 @@ def build_key_vault_import_key_request(key_name: str, **kwargs: Any) -> HttpRequ _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -126,7 +126,7 @@ def build_key_vault_delete_key_request(key_name: str, **kwargs: Any) -> HttpRequ _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -151,7 +151,7 @@ def build_key_vault_update_key_request(key_name: str, key_version: str, **kwargs _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -178,7 +178,7 @@ def build_key_vault_get_key_request(key_name: str, key_version: str, **kwargs: A _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -205,7 +205,7 @@ def build_key_vault_get_key_versions_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -231,7 +231,7 @@ def build_key_vault_get_keys_request(*, maxresults: Optional[int] = None, **kwar _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - 
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -252,7 +252,7 @@ def build_key_vault_backup_key_request(key_name: str, **kwargs: Any) -> HttpRequ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -277,7 +277,7 @@ def build_key_vault_restore_key_request(**kwargs: Any) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -299,7 +299,7 @@ def build_key_vault_encrypt_request(key_name: str, key_version: str, **kwargs: A _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -327,7 +327,7 @@ def build_key_vault_decrypt_request(key_name: str, key_version: str, **kwargs: A _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -355,7 +355,7 @@ def build_key_vault_sign_request(key_name: str, key_version: str, **kwargs: Any) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -383,7 +383,7 @@ def build_key_vault_verify_request(key_name: str, key_version: str, **kwargs: An _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -411,7 +411,7 @@ def build_key_vault_wrap_key_request(key_name: str, key_version: str, **kwargs: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -439,7 +439,7 @@ def build_key_vault_unwrap_key_request(key_name: str, key_version: str, **kwargs _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -467,7 +467,7 @@ def build_key_vault_release_request(key_name: str, key_version: str, **kwargs: A _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -494,7 +494,7 @@ def build_key_vault_get_deleted_keys_request(*, maxresults: Optional[int] = None _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -515,7 +515,7 @@ def build_key_vault_get_deleted_key_request(key_name: str, **kwargs: Any) -> Htt _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -538,12 +538,9 @@ def build_key_vault_get_deleted_key_request(key_name: str, **kwargs: Any) -> Htt def build_key_vault_purge_deleted_key_request( # pylint: disable=name-too-long key_name: str, **kwargs: Any ) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - 
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) - accept = _headers.pop("Accept", "application/json") - + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) # Construct URL _url = "/deletedkeys/{key-name}" path_format_arguments = { @@ -555,10 +552,7 @@ def build_key_vault_purge_deleted_key_request( # pylint: disable=name-too-long # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) def build_key_vault_recover_deleted_key_request( # pylint: disable=name-too-long @@ -567,7 +561,7 @@ def build_key_vault_recover_deleted_key_request( # pylint: disable=name-too-lon _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -593,7 +587,7 @@ def build_key_vault_get_key_rotation_policy_request( # pylint: disable=name-too _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -620,7 +614,7 @@ def build_key_vault_update_key_rotation_policy_request( # pylint: disable=name- _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", 
_headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -647,7 +641,7 @@ def build_key_vault_get_random_bytes_request(**kwargs: Any) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -670,7 +664,7 @@ def build_key_vault_get_key_attestation_request( # pylint: disable=name-too-lon _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -691,7 +685,7 @@ def build_key_vault_get_key_attestation_request( # pylint: disable=name-too-lon return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -class KeyVaultClientOperationsMixin( # pylint: disable=too-many-public-methods +class _KeyVaultClientOperationsMixin( # pylint: disable=too-many-public-methods ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], KeyVaultClientConfiguration] ): @@ -834,6 +828,7 @@ def create_key( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, 
stream=_stream, **kwargs @@ -848,11 +843,14 @@ def create_key( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) @@ -902,6 +900,7 @@ def rotate_key(self, key_name: str, **kwargs: Any) -> _models.KeyBundle: } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -916,11 +915,14 @@ def rotate_key(self, key_name: str, **kwargs: Any) -> _models.KeyBundle: except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) @@ -1068,6 +1070,7 @@ def import_key( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1082,11 +1085,14 @@ def 
import_key( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) @@ -1136,6 +1142,7 @@ def delete_key(self, key_name: str, **kwargs: Any) -> _models.DeletedKeyBundle: } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1150,11 +1157,14 @@ def delete_key(self, key_name: str, **kwargs: Any) -> _models.DeletedKeyBundle: except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.DeletedKeyBundle, response.json()) @@ -1320,6 +1330,7 @@ def update_key( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1334,11 +1345,14 @@ def update_key( except 
(StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) @@ -1391,6 +1405,7 @@ def get_key(self, key_name: str, key_version: str, **kwargs: Any) -> _models.Key } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1405,11 +1420,14 @@ def get_key(self, key_name: str, key_version: str, **kwargs: Any) -> _models.Key except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) @@ -1421,7 +1439,7 @@ def get_key(self, key_name: str, key_version: str, **kwargs: Any) -> _models.Key @distributed_trace def get_key_versions( self, key_name: str, *, maxresults: Optional[int] = None, **kwargs: Any - ) -> Iterable["_models.KeyItem"]: + ) -> ItemPaged["_models.KeyItem"]: """Retrieves a list of individual key versions with the same key name. 
The full key identifier, attributes, and tags are provided in the response. This operation @@ -1439,7 +1457,7 @@ def get_key_versions( _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.KeyItem]] = kwargs.pop("cls", None) + cls: ClsType[list[_models.KeyItem]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -1490,7 +1508,10 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.KeyItem], deserialized.get("value", [])) + list_of_elem = _deserialize( + list[_models.KeyItem], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -1506,7 +1527,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) return pipeline_response @@ -1514,7 +1538,7 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) @distributed_trace - def get_keys(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> Iterable["_models.KeyItem"]: + def get_keys(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> ItemPaged["_models.KeyItem"]: """List keys in the specified vault. 
Retrieves a list of the keys in the Key Vault as JSON Web Key structures that contain the @@ -1532,7 +1556,7 @@ def get_keys(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> Iterab _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.KeyItem]] = kwargs.pop("cls", None) + cls: ClsType[list[_models.KeyItem]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -1582,7 +1606,10 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.KeyItem], deserialized.get("value", [])) + list_of_elem = _deserialize( + list[_models.KeyItem], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -1598,7 +1625,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) return pipeline_response @@ -1653,6 +1683,7 @@ def backup_key(self, key_name: str, **kwargs: Any) -> _models.BackupKeyResult: } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1667,11 +1698,14 @@ def backup_key(self, key_name: str, **kwargs: Any) -> _models.BackupKeyResult: except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = 
_failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.BackupKeyResult, response.json()) @@ -1821,6 +1855,7 @@ def restore_key( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1835,11 +1870,14 @@ def restore_key( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) @@ -2021,6 +2059,7 @@ def encrypt( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2035,11 +2074,14 @@ def encrypt( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise 
HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyOperationResult, response.json()) @@ -2229,6 +2271,7 @@ def decrypt( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2243,11 +2286,14 @@ def decrypt( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyOperationResult, response.json()) @@ -2408,6 +2454,7 @@ def sign( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2422,11 +2469,14 @@ def sign( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else 
response.iter_raw() else: deserialized = _deserialize(_models.KeyOperationResult, response.json()) @@ -2596,6 +2646,7 @@ def verify( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2610,11 +2661,14 @@ def verify( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyVerifyResult, response.json()) @@ -2788,6 +2842,7 @@ def wrap_key( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2802,11 +2857,14 @@ def wrap_key( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyOperationResult, response.json()) @@ -2972,6 +3030,7 @@ def unwrap_key( } _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2986,11 +3045,14 @@ def unwrap_key( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyOperationResult, response.json()) @@ -3152,6 +3214,7 @@ def release( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3166,11 +3229,14 @@ def release( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyReleaseResult, response.json()) @@ -3182,7 +3248,7 @@ def release( @distributed_trace def get_deleted_keys( self, *, maxresults: Optional[int] = None, **kwargs: Any - ) -> Iterable["_models.DeletedKeyItem"]: + ) -> 
ItemPaged["_models.DeletedKeyItem"]: """Lists the deleted keys in the specified vault. Retrieves a list of the keys in the Key Vault as JSON Web Key structures that contain the @@ -3201,7 +3267,7 @@ def get_deleted_keys( _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.DeletedKeyItem]] = kwargs.pop("cls", None) + cls: ClsType[list[_models.DeletedKeyItem]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -3251,7 +3317,10 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.DeletedKeyItem], deserialized.get("value", [])) + list_of_elem = _deserialize( + list[_models.DeletedKeyItem], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -3267,7 +3336,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) return pipeline_response @@ -3314,6 +3386,7 @@ def get_deleted_key(self, key_name: str, **kwargs: Any) -> _models.DeletedKeyBun } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3328,11 +3401,14 @@ def get_deleted_key(self, key_name: str, **kwargs: Any) -> _models.DeletedKeyBun except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, 
error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.DeletedKeyBundle, response.json()) @@ -3390,7 +3466,10 @@ def purge_deleted_key(self, key_name: str, **kwargs: Any) -> None: # pylint: di if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if cls: @@ -3437,6 +3516,7 @@ def recover_deleted_key(self, key_name: str, **kwargs: Any) -> _models.KeyBundle } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3451,11 +3531,14 @@ def recover_deleted_key(self, key_name: str, **kwargs: Any) -> _models.KeyBundle except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) @@ -3503,6 +3586,7 @@ def get_key_rotation_policy(self, key_name: str, **kwargs: Any) 
-> _models.KeyRo } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3517,11 +3601,14 @@ def get_key_rotation_policy(self, key_name: str, **kwargs: Any) -> _models.KeyRo except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyRotationPolicy, response.json()) @@ -3653,6 +3740,7 @@ def update_key_rotation_policy( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3667,11 +3755,14 @@ def update_key_rotation_policy( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyRotationPolicy, response.json()) @@ -3785,6 +3876,7 @@ def get_random_bytes( } _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3799,11 +3891,14 @@ def get_random_bytes( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.RandomBytes, response.json()) @@ -3814,8 +3909,9 @@ def get_random_bytes( @distributed_trace @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "key_name", "key_version", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["key_name", "key_version", "accept", "api_version"]}, + api_versions_list=["7.6-preview.2", "7.6", "2025-06-01-preview", "2025-07-01"], ) def get_key_attestation(self, key_name: str, key_version: str, **kwargs: Any) -> _models.KeyBundle: """Gets the public part of a stored key along with its attestation blob. 
@@ -3860,6 +3956,7 @@ def get_key_attestation(self, key_name: str, key_version: str, **kwargs: Any) -> } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3874,11 +3971,14 @@ def get_key_attestation(self, key_name: str, key_version: str, **kwargs: Any) -> except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_utils/model_base.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_utils/model_base.py index 49d5c7259389..db24930fdca9 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_utils/model_base.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_utils/model_base.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -29,6 +29,7 @@ from azure.core import CaseInsensitiveEnumMeta from azure.core.pipeline import PipelineResponse from azure.core.serialization import _Null +from azure.core.rest import HttpResponse _LOGGER = logging.getLogger(__name__) @@ -36,6 +37,7 @@ TZ_UTC = timezone.utc _T = typing.TypeVar("_T") +_NONE_TYPE = type(None) def _timedelta_as_isostr(td: timedelta) -> str: @@ -170,6 +172,21 @@ def default(self, o): # pylint: disable=too-many-return-statements r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" ) +_ARRAY_ENCODE_MAPPING = { + "pipeDelimited": "|", + "spaceDelimited": " ", + "commaDelimited": ",", + "newlineDelimited": "\n", +} + + +def _deserialize_array_encoded(delimit: str, attr): + if isinstance(attr, str): + if attr == "": + return [] + return attr.split(delimit) + return attr + def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: """Deserialize ISO-8601 formatted string into Datetime object. @@ -201,7 +218,7 @@ def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: test_utc = date_obj.utctimetuple() if test_utc.tm_year > 9999 or test_utc.tm_year < 1: raise OverflowError("Hit max or min date") - return date_obj + return date_obj # type: ignore[no-any-return] def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: @@ -255,7 +272,7 @@ def _deserialize_time(attr: typing.Union[str, time]) -> time: """ if isinstance(attr, time): return attr - return isodate.parse_time(attr) + return isodate.parse_time(attr) # type: ignore[no-any-return] def _deserialize_bytes(attr): @@ -314,6 +331,8 @@ def _deserialize_int_as_str(attr): def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): if annotation is int and rf and rf._format == "str": return _deserialize_int_as_str + if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING: + return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format]) if rf 
and rf._format: return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore @@ -345,16 +364,46 @@ def _get_model(module_name: str, model_name: str): class _MyMutableMapping(MutableMapping[str, typing.Any]): - def __init__(self, data: typing.Dict[str, typing.Any]) -> None: + def __init__(self, data: dict[str, typing.Any]) -> None: self._data = data def __contains__(self, key: typing.Any) -> bool: return key in self._data def __getitem__(self, key: str) -> typing.Any: + # If this key has been deserialized (for mutable types), we need to handle serialization + if hasattr(self, "_attr_to_rest_field"): + cache_attr = f"_deserialized_{key}" + if hasattr(self, cache_attr): + rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key) + if rf: + value = self._data.get(key) + if isinstance(value, (dict, list, set)): + # For mutable types, serialize and return + # But also update _data with serialized form and clear flag + # so mutations via this returned value affect _data + serialized = _serialize(value, rf._format) + # If serialized form is same type (no transformation needed), + # return _data directly so mutations work + if isinstance(serialized, type(value)) and serialized == value: + return self._data.get(key) + # Otherwise return serialized copy and clear flag + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + # Store serialized form back + self._data[key] = serialized + return serialized return self._data.__getitem__(key) def __setitem__(self, key: str, value: typing.Any) -> None: + # Clear any cached deserialized value when setting through dictionary access + cache_attr = f"_deserialized_{key}" + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass self._data.__setitem__(key, value) def __delitem__(self, key: str) -> None: @@ -425,7 +474,7 @@ def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: return self._data.pop(key) return 
self._data.pop(key, default) - def popitem(self) -> typing.Tuple[str, typing.Any]: + def popitem(self) -> tuple[str, typing.Any]: """ Removes and returns some (key, value) pair :returns: The (key, value) pair. @@ -466,6 +515,8 @@ def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: return self._data.setdefault(key, default) def __eq__(self, other: typing.Any) -> bool: + if isinstance(other, _MyMutableMapping): + return self._data == other._data try: other_model = self.__class__(other) except Exception: @@ -482,6 +533,8 @@ def _is_model(obj: typing.Any) -> bool: def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements if isinstance(o, list): + if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o): + return _ARRAY_ENCODE_MAPPING[format].join(o) return [_serialize(x, format) for x in o] if isinstance(o, dict): return {k: _serialize(v, format) for k, v in o.items()} @@ -513,9 +566,7 @@ def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-m return o -def _get_rest_field( - attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str -) -> typing.Optional["_RestField"]: +def _get_rest_field(attr_to_rest_field: dict[str, "_RestField"], rest_name: str) -> typing.Optional["_RestField"]: try: return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) except StopIteration: @@ -538,7 +589,7 @@ class Model(_MyMutableMapping): _is_model = True # label whether current class's _attr_to_rest_field has been calculated # could not see _attr_to_rest_field directly because subclass inherits it from parent class - _calculated: typing.Set[str] = set() + _calculated: set[str] = set() def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: class_name = self.__class__.__name__ @@ -549,54 +600,9 @@ def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: for rest_field in self._attr_to_rest_field.values() if 
rest_field._default is not _UNSET } - if args: # pylint: disable=too-many-nested-blocks + if args: if isinstance(args[0], ET.Element): - existed_attr_keys = [] - model_meta = getattr(self, "_xml", {}) - - for rf in self._attr_to_rest_field.values(): - prop_meta = getattr(rf, "_xml", {}) - xml_name = prop_meta.get("name", rf._rest_name) - xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) - if xml_ns: - xml_name = "{" + xml_ns + "}" + xml_name - - # attribute - if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: - existed_attr_keys.append(xml_name) - dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) - continue - - # unwrapped element is array - if prop_meta.get("unwrapped", False): - # unwrapped array could either use prop items meta/prop meta - if prop_meta.get("itemsName"): - xml_name = prop_meta.get("itemsName") - xml_ns = prop_meta.get("itemNs") - if xml_ns: - xml_name = "{" + xml_ns + "}" + xml_name - items = args[0].findall(xml_name) # pyright: ignore - if len(items) > 0: - existed_attr_keys.append(xml_name) - dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) - continue - - # text element is primitive type - if prop_meta.get("text", False): - if args[0].text is not None: - dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) - continue - - # wrapped element could be normal property or array, it should only have one element - item = args[0].find(xml_name) - if item is not None: - existed_attr_keys.append(xml_name) - dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) - - # rest thing is additional properties - for e in args[0]: - if e.tag not in existed_attr_keys: - dict_to_pass[e.tag] = _convert_element(e) + dict_to_pass.update(self._init_from_xml(args[0])) else: dict_to_pass.update( {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} @@ -615,6 +621,69 @@ def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: ) 
super().__init__(dict_to_pass) + def _init_from_xml(self, element: ET.Element) -> dict[str, typing.Any]: + """Deserialize an XML element into a dict mapping rest field names to values. + + :param ET.Element element: The XML element to deserialize from. + :returns: A dictionary of rest_name to deserialized value pairs. + :rtype: dict + """ + result: dict[str, typing.Any] = {} + model_meta = getattr(self, "_xml", {}) + existed_attr_keys: list[str] = [] + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = _resolve_xml_ns(prop_meta, model_meta) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and element.get(xml_name) is not None: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = _deserialize(rf._type, element.get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + _items_name = prop_meta.get("itemsName") + if _items_name: + xml_name = _items_name + _items_ns = prop_meta.get("itemsNs") + if _items_ns is not None: + xml_ns = _items_ns + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = element.findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = _deserialize(rf._type, items) + elif not rf._is_optional: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = [] + continue + + # text element is primitive type + if prop_meta.get("text", False): + if element.text is not None: + result[rf._rest_name] = _deserialize(rf._type, element.text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = element.find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional 
properties + for e in element: + if e.tag not in existed_attr_keys: + result[e.tag] = _convert_element(e) + + return result + def copy(self) -> "Model": return Model(self.__dict__) @@ -623,7 +692,7 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order - attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property + attr_to_rest_field: dict[str, _RestField] = { # map attribute name to rest_field property k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") } annotations = { @@ -638,7 +707,7 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) if not rf._rest_name_input: rf._rest_name_input = attr - cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items()) cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") return super().__new__(cls) @@ -667,7 +736,7 @@ def _deserialize(cls, data, exist_discriminators): model_meta = getattr(cls, "_xml", {}) prop_meta = getattr(discriminator, "_xml", {}) xml_name = prop_meta.get("name", discriminator._rest_name) - xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + xml_ns = _resolve_xml_ns(prop_meta, model_meta) if xml_ns: xml_name = "{" + xml_ns + "}" + xml_name @@ -680,7 +749,7 @@ def _deserialize(cls, data, exist_discriminators): mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member return mapped_cls._deserialize(data, exist_discriminators) - def as_dict(self, *, exclude_readonly: bool = False) -> 
typing.Dict[str, typing.Any]: + def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]: """Return a dict that can be turned into json using json.dump. :keyword bool exclude_readonly: Whether to remove the readonly properties. @@ -740,7 +809,7 @@ def _deserialize_with_union(deserializers, obj): def _deserialize_dict( value_deserializer: typing.Optional[typing.Callable], module: typing.Optional[str], - obj: typing.Dict[typing.Any, typing.Any], + obj: dict[typing.Any, typing.Any], ): if obj is None: return obj @@ -750,7 +819,7 @@ def _deserialize_dict( def _deserialize_multiple_sequence( - entry_deserializers: typing.List[typing.Optional[typing.Callable]], + entry_deserializers: list[typing.Optional[typing.Callable]], module: typing.Optional[str], obj, ): @@ -759,6 +828,14 @@ def _deserialize_multiple_sequence( return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) +def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool: + return ( + isinstance(deserializer, functools.partial) + and isinstance(deserializer.args[0], functools.partial) + and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable + ) + + def _deserialize_sequence( deserializer: typing.Optional[typing.Callable], module: typing.Optional[str], @@ -768,17 +845,30 @@ def _deserialize_sequence( return obj if isinstance(obj, ET.Element): obj = list(obj) + + # encoded string may be deserialized to sequence + if isinstance(obj, str) and isinstance(deserializer, functools.partial): + # for list[str] + if _is_array_encoded_deserializer(deserializer): + return deserializer(obj) + + # for list[Union[...]] + if isinstance(deserializer.args[0], list): + for sub_deserializer in deserializer.args[0]: + if _is_array_encoded_deserializer(sub_deserializer): + return sub_deserializer(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) -def 
_sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.Any]: +def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]: return sorted( types, key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), ) -def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches annotation: typing.Any, module: typing.Optional[str], rf: typing.Optional["_RestField"] = None, @@ -818,16 +908,18 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur # is it optional? try: - if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if any(a is _NONE_TYPE for a in annotation.__args__): # pyright: ignore + if rf: + rf._is_optional = True if len(annotation.__args__) <= 2: # pyright: ignore if_obj_deserializer = _get_deserialize_callable_from_annotation( - next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf # pyright: ignore ) return functools.partial(_deserialize_with_optional, if_obj_deserializer) # the type is Optional[Union[...]], we need to remove the None type from the Union annotation_copy = copy.copy(annotation) - annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE] # pyright: ignore return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) except AttributeError: pass @@ -843,7 +935,10 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur return functools.partial(_deserialize_with_union, deserializers) try: - if annotation._name == "Dict": # pyright: ignore + annotation_name = ( + 
annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() == "dict": value_deserializer = _get_deserialize_callable_from_annotation( annotation.__args__[1], module, rf # pyright: ignore ) @@ -856,7 +951,10 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur except (AttributeError, IndexError): pass try: - if annotation._name in ["List", "Set", "Tuple", "Sequence"]: # pyright: ignore + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() in ["list", "set", "tuple", "sequence"]: if len(annotation.__args__) > 1: # pyright: ignore entry_deserializers = [ _get_deserialize_callable_from_annotation(dt, module, rf) @@ -905,16 +1003,20 @@ def _deserialize_with_callable( return float(value.text) if value.text else None if deserializer is bool: return value.text == "true" if value.text else None + if deserializer and deserializer in _DESERIALIZE_MAPPING.values(): + return deserializer(value.text) if value.text else None + if deserializer and deserializer in _DESERIALIZE_MAPPING_WITHFORMAT.values(): + return deserializer(value.text) if value.text else None if deserializer is None: return value if deserializer in [int, float, bool]: return deserializer(value) if isinstance(deserializer, CaseInsensitiveEnumMeta): try: - return deserializer(value) + return deserializer(value.text if isinstance(value, ET.Element) else value) except ValueError: # for unknown value, return raw value - return value + return value.text if isinstance(value, ET.Element) else value if isinstance(deserializer, type) and issubclass(deserializer, Model): return deserializer._deserialize(value, []) return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) @@ -940,14 +1042,14 @@ def _deserialize( def _failsafe_deserialize( deserializer: typing.Any, - value: typing.Any, + response: 
HttpResponse, module: typing.Optional[str] = None, rf: typing.Optional["_RestField"] = None, format: typing.Optional[str] = None, ) -> typing.Any: try: - return _deserialize(deserializer, value, module, rf, format) - except DeserializationError: + return _deserialize(deserializer, response.json(), module, rf, format) + except Exception: # pylint: disable=broad-except _LOGGER.warning( "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True ) @@ -956,17 +1058,18 @@ def _failsafe_deserialize( def _failsafe_deserialize_xml( deserializer: typing.Any, - value: typing.Any, + response: HttpResponse, ) -> typing.Any: try: - return _deserialize_xml(deserializer, value) - except DeserializationError: + return _deserialize_xml(deserializer, response.text()) + except Exception: # pylint: disable=broad-except _LOGGER.warning( "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True ) return None +# pylint: disable=too-many-instance-attributes class _RestField: def __init__( self, @@ -974,11 +1077,11 @@ def __init__( name: typing.Optional[str] = None, type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin is_discriminator: bool = False, - visibility: typing.Optional[typing.List[str]] = None, + visibility: typing.Optional[list[str]] = None, default: typing.Any = _UNSET, format: typing.Optional[str] = None, is_multipart_file_input: bool = False, - xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, ): self._type = type self._rest_name_input = name @@ -986,6 +1089,7 @@ def __init__( self._is_discriminator = is_discriminator self._visibility = visibility self._is_model = False + self._is_optional = False self._default = default self._format = format self._is_multipart_file_input = is_multipart_file_input @@ -993,7 +1097,11 @@ def __init__( @property def _class_type(self) -> typing.Any: - return 
getattr(self._type, "args", [None])[0] + result = getattr(self._type, "args", [None])[0] + # type may be wrapped by nested functools.partial so we need to check for that + if isinstance(result, functools.partial): + return getattr(result, "args", [None])[0] + return result @property def _rest_name(self) -> str: @@ -1004,14 +1112,37 @@ def _rest_name(self) -> str: def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin # by this point, type and rest_name will have a value bc we default # them in __new__ of the Model class - item = obj.get(self._rest_name) + # Use _data.get() directly to avoid triggering __getitem__ which clears the cache + item = obj._data.get(self._rest_name) if item is None: return item if self._is_model: return item - return _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, we want mutations to directly affect _data + # Check if we've already deserialized this value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + # Return the value from _data directly (it's been deserialized in place) + return obj._data.get(self._rest_name) + + deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, store the deserialized value back in _data + # so mutations directly affect _data + if isinstance(deserialized, (dict, list, set)): + obj._data[self._rest_name] = deserialized + object.__setattr__(obj, cache_attr, True) # Mark as deserialized + return deserialized + + return deserialized def __set__(self, obj: Model, value) -> None: + # Clear the cached deserialized object when setting a new value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + object.__delattr__(obj, cache_attr) + if value is None: # we want to wipe out entries if users set attr to None try: @@ -1036,11 +1167,11 @@ def rest_field( *, name: typing.Optional[str] = None, type: typing.Optional[typing.Callable] = None, # 
pylint: disable=redefined-builtin - visibility: typing.Optional[typing.List[str]] = None, + visibility: typing.Optional[list[str]] = None, default: typing.Any = _UNSET, format: typing.Optional[str] = None, is_multipart_file_input: bool = False, - xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, ) -> typing.Any: return _RestField( name=name, @@ -1057,8 +1188,8 @@ def rest_discriminator( *, name: typing.Optional[str] = None, type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin - visibility: typing.Optional[typing.List[str]] = None, - xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + visibility: typing.Optional[list[str]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, ) -> typing.Any: return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) @@ -1074,21 +1205,77 @@ def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore +def _get_xml_ns(meta: dict[str, typing.Any]) -> typing.Optional[str]: + """Return the XML namespace from a metadata dict, checking both 'ns' (old-style) and 'namespace' (DPG) keys. + + :param dict meta: The metadata dictionary to extract namespace from. + :returns: The namespace string if 'ns' or 'namespace' key is present, None otherwise. + :rtype: str or None + """ + ns = meta.get("ns") + if ns is None: + ns = meta.get("namespace") + return ns + + +def _resolve_xml_ns( + prop_meta: dict[str, typing.Any], model_meta: typing.Optional[dict[str, typing.Any]] = None +) -> typing.Optional[str]: + """Resolve XML namespace for a property, falling back to model namespace when appropriate. + + Checks the property metadata first; if no namespace is found and the model does not declare + an explicit prefix, falls back to the model-level namespace. 
+ + :param dict prop_meta: The property metadata dictionary. + :param dict model_meta: The model metadata dictionary, used as fallback. + :returns: The resolved namespace string, or None. + :rtype: str or None + """ + ns = _get_xml_ns(prop_meta) + if ns is None and model_meta is not None and not model_meta.get("prefix"): + ns = _get_xml_ns(model_meta) + return ns + + +def _set_xml_attribute(element: ET.Element, name: str, value: typing.Any, prop_meta: dict[str, typing.Any]) -> None: + """Set an XML attribute on an element, handling namespace prefix registration. + + :param ET.Element element: The element to set the attribute on. + :param str name: The default attribute name (wire name). + :param any value: The attribute value. + :param dict prop_meta: The property metadata dictionary. + """ + xml_name = prop_meta.get("name", name) + _attr_ns = _get_xml_ns(prop_meta) + if _attr_ns: + _attr_prefix = prop_meta.get("prefix") + if _attr_prefix: + _safe_register_namespace(_attr_prefix, _attr_ns) + xml_name = "{" + _attr_ns + "}" + xml_name + element.set(xml_name, _get_primitive_type_value(value)) + + def _get_element( o: typing.Any, exclude_readonly: bool = False, - parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None, + parent_meta: typing.Optional[dict[str, typing.Any]] = None, wrapped_element: typing.Optional[ET.Element] = None, -) -> typing.Union[ET.Element, typing.List[ET.Element]]: +) -> typing.Union[ET.Element, list[ET.Element]]: if _is_model(o): model_meta = getattr(o, "_xml", {}) # if prop is a model, then use the prop element directly, else generate a wrapper of model if wrapped_element is None: + # When serializing as an array item (parent_meta is set), check if the parent has an + # explicit itemsName. This ensures correct element names for unwrapped arrays (where + # the element tag is the property/items name, not the model type name). 
+ _items_name = parent_meta.get("itemsName") if parent_meta is not None else None + element_name = _items_name if _items_name else (model_meta.get("name") or o.__class__.__name__) + _model_ns = _get_xml_ns(model_meta) wrapped_element = _create_xml_element( - model_meta.get("name", o.__class__.__name__), + element_name, model_meta.get("prefix"), - model_meta.get("ns"), + _model_ns, ) readonly_props = [] @@ -1110,7 +1297,9 @@ def _get_element( # additional properties will not have rest field, use the wire name as xml name prop_meta = {"name": k} - # if no ns for prop, use model's + # Propagate model namespace to properties only for old-style "ns"-keyed models. + # DPG-generated models use the "namespace" key and explicitly declare namespace on + # each property that needs it, so propagation is intentionally skipped for them. if prop_meta.get("ns") is None and model_meta.get("ns"): prop_meta["ns"] = model_meta.get("ns") prop_meta["prefix"] = model_meta.get("prefix") @@ -1122,12 +1311,7 @@ def _get_element( # text could only set on primitive type wrapped_element.text = _get_primitive_type_value(v) elif prop_meta.get("attribute", False): - xml_name = prop_meta.get("name", k) - if prop_meta.get("ns"): - ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore - xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore - # attribute should be primitive type - wrapped_element.set(xml_name, _get_primitive_type_value(v)) + _set_xml_attribute(wrapped_element, k, v, prop_meta) else: # other wrapped prop element wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) @@ -1136,6 +1320,7 @@ def _get_element( return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore if isinstance(o, dict): result = [] + _dict_ns = _get_xml_ns(parent_meta) if parent_meta else None for k, v in o.items(): result.append( _get_wrapped_element( @@ -1143,7 +1328,7 @@ def _get_element( exclude_readonly, { "name": k, - 
"ns": parent_meta.get("ns") if parent_meta else None, + "ns": _dict_ns, "prefix": parent_meta.get("prefix") if parent_meta else None, }, ) @@ -1152,13 +1337,16 @@ def _get_element( # primitive case need to create element based on parent_meta if parent_meta: + _items_ns = parent_meta.get("itemsNs") + if _items_ns is None: + _items_ns = _get_xml_ns(parent_meta) return _get_wrapped_element( o, exclude_readonly, { "name": parent_meta.get("itemsName", parent_meta.get("name")), "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), - "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + "ns": _items_ns, }, ) @@ -1168,10 +1356,11 @@ def _get_element( def _get_wrapped_element( v: typing.Any, exclude_readonly: bool, - meta: typing.Optional[typing.Dict[str, typing.Any]], + meta: typing.Optional[dict[str, typing.Any]], ) -> ET.Element: + _meta_ns = _get_xml_ns(meta) if meta else None wrapped_element = _create_xml_element( - meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + meta.get("name") if meta else None, meta.get("prefix") if meta else None, _meta_ns ) if isinstance(v, (dict, list)): wrapped_element.extend(_get_element(v, exclude_readonly, meta)) @@ -1179,7 +1368,7 @@ def _get_wrapped_element( _get_element(v, exclude_readonly, meta, wrapped_element) else: wrapped_element.text = _get_primitive_type_value(v) - return wrapped_element + return wrapped_element # type: ignore[no-any-return] def _get_primitive_type_value(v) -> str: @@ -1192,9 +1381,29 @@ def _get_primitive_type_value(v) -> str: return str(v) -def _create_xml_element(tag, prefix=None, ns=None): - if prefix and ns: +def _safe_register_namespace(prefix: str, ns: str) -> None: + """Register an XML namespace prefix, handling reserved prefix patterns. + + Some prefixes (e.g. 'ns2') match Python's reserved 'ns\\d+' pattern used for + auto-generated prefixes, causing register_namespace to raise ValueError. 
+ Falls back to directly registering in the internal namespace map. + + :param str prefix: The namespace prefix to register. + :param str ns: The namespace URI. + """ + try: ET.register_namespace(prefix, ns) + except ValueError: + _ns_map = getattr(ET, "_namespace_map", None) + if _ns_map is not None: + _ns_map[ns] = prefix + + +def _create_xml_element( + tag: typing.Any, prefix: typing.Optional[str] = None, ns: typing.Optional[str] = None +) -> ET.Element: + if prefix and ns: + _safe_register_namespace(prefix, ns) if ns: return ET.Element("{" + ns + "}" + tag) return ET.Element(tag) @@ -1211,7 +1420,7 @@ def _deserialize_xml( def _convert_element(e: ET.Element): # dict case if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: - dict_result: typing.Dict[str, typing.Any] = {} + dict_result: dict[str, typing.Any] = {} for child in e: if dict_result.get(child.tag) is not None: if isinstance(dict_result[child.tag], list): @@ -1224,7 +1433,7 @@ def _convert_element(e: ET.Element): return dict_result # array case if len(e) > 0: - array_result: typing.List[typing.Any] = [] + array_result: list[typing.Any] = [] for child in e: array_result.append(_convert_element(child)) return array_result diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_utils/serialization.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_utils/serialization.py index eb86ea23c965..81ec1de5922b 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_utils/serialization.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_utils/serialization.py @@ -21,7 +21,6 @@ import sys import codecs from typing import ( - Dict, Any, cast, Optional, @@ -31,7 +30,6 @@ Mapping, Callable, MutableMapping, - List, ) try: @@ -229,12 +227,12 @@ class Model: serialization and deserialization. 
""" - _subtype_map: Dict[str, Dict[str, Any]] = {} - _attribute_map: Dict[str, Dict[str, Any]] = {} - _validation: Dict[str, Dict[str, Any]] = {} + _subtype_map: dict[str, dict[str, Any]] = {} + _attribute_map: dict[str, dict[str, Any]] = {} + _validation: dict[str, dict[str, Any]] = {} def __init__(self, **kwargs: Any) -> None: - self.additional_properties: Optional[Dict[str, Any]] = {} + self.additional_properties: Optional[dict[str, Any]] = {} for k in kwargs: # pylint: disable=consider-using-dict-items if k not in self._attribute_map: _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) @@ -311,7 +309,7 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: def as_dict( self, keep_readonly: bool = True, - key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer, **kwargs: Any ) -> JSON: """Return a dict that can be serialized using json.dump. @@ -380,7 +378,7 @@ def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: def from_dict( cls, data: Any, - key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None, content_type: Optional[str] = None, ) -> Self: """Parse a dict using given key extractor return a model. 
@@ -414,7 +412,7 @@ def _flatten_subtype(cls, key, objects): return {} result = dict(cls._subtype_map[key]) for valuetype in cls._subtype_map[key].values(): - result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access + result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access return result @classmethod @@ -528,7 +526,7 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: "[]": self.serialize_iter, "{}": self.serialize_dict, } - self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.dependencies: dict[str, type] = dict(classes) if classes else {} self.key_transformer = full_restapi_key_transformer self.client_side_validation = True @@ -579,7 +577,7 @@ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, to if attr_name == "additional_properties" and attr_desc["key"] == "": if target_obj.additional_properties is not None: - serialized.update(target_obj.additional_properties) + serialized |= target_obj.additional_properties continue try: @@ -789,7 +787,7 @@ def serialize_data(self, data, data_type, **kwargs): # If dependencies is empty, try with current data class # It has to be a subclass of Enum anyway - enum_type = self.dependencies.get(data_type, data.__class__) + enum_type = self.dependencies.get(data_type, cast(type, data.__class__)) if issubclass(enum_type, Enum): return Serializer.serialize_enum(data, enum_obj=enum_type) @@ -823,13 +821,20 @@ def serialize_basic(cls, data, data_type, **kwargs): :param str data_type: Type of object in the iterable. :rtype: str, int, float, bool :return: serialized object + :raises TypeError: raise if data_type is not one of str, int, float, bool. 
""" custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(data) + if data_type == "float": + return float(data) + if data_type == "bool": + return bool(data) + raise TypeError("Unknown basic data type: {}".format(data_type)) @classmethod def serialize_unicode(cls, data): @@ -1184,7 +1189,7 @@ def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argumen while "." in key: # Need the cast, as for some reasons "split" is typed as list[str | Any] - dict_keys = cast(List[str], _FLATTEN.split(key)) + dict_keys = cast(list[str], _FLATTEN.split(key)) if len(dict_keys) == 1: key = _decode_attribute_map_key(dict_keys[0]) break @@ -1386,7 +1391,7 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: "duration": (isodate.Duration, datetime.timedelta), "iso-8601": (datetime.datetime), } - self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.dependencies: dict[str, type] = dict(classes) if classes else {} self.key_extractors = [rest_key_extractor, xml_key_extractor] # Additional properties only works if the "rest_key_extractor" is used to # extract the keys. Making it to work whatever the key extractor is too much @@ -1759,7 +1764,7 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return :param str data_type: deserialization data type. :return: Deserialized basic type. :rtype: str, int, float or bool - :raises TypeError: if string format is not valid. + :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool. """ # If we're here, data is supposed to be a basic type. 
# If it's still an XML node, take the text @@ -1785,7 +1790,11 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(attr) + if data_type == "float": + return float(attr) + raise TypeError("Unknown basic data type: {}".format(data_type)) @staticmethod def deserialize_unicode(data): diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_validation.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_validation.py index 752b2822f9d3..f5af3a4eb8a2 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_validation.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_validation.py @@ -10,6 +10,22 @@ def api_version_validation(**kwargs): params_added_on = kwargs.pop("params_added_on", {}) method_added_on = kwargs.pop("method_added_on", "") + api_versions_list = kwargs.pop("api_versions_list", []) + + def _index_with_default(value: str, default: int = -1) -> int: + """Get the index of value in lst, or return default if not found. + + :param value: The value to search for in the api_versions_list. + :type value: str + :param default: The default value to return if the value is not found. + :type default: int + :return: The index of the value in the list, or the default value if not found. + :rtype: int + """ + try: + return api_versions_list.index(value) + except ValueError: + return default def decorator(func): @functools.wraps(func) @@ -21,7 +37,7 @@ def wrapper(*args, **kwargs): except AttributeError: return func(*args, **kwargs) - if method_added_on > client_api_version: + if _index_with_default(method_added_on) > _index_with_default(client_api_version): raise ValueError( f"'{func.__name__}' is not available in API version " f"{client_api_version}. 
Pass service API version {method_added_on} or newer to your client." @@ -31,7 +47,7 @@ def wrapper(*args, **kwargs): parameter: api_version for api_version, parameters in params_added_on.items() for parameter in parameters - if parameter in kwargs and api_version > client_api_version + if parameter in kwargs and _index_with_default(api_version) > _index_with_default(client_api_version) } if unsupported: raise ValueError( diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_version.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_version.py index bd412784f4e1..ecd03f6e9262 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_version.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "4.10.0b1" +VERSION = "4.10.0" diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_client.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_client.py index 3f8e48a8e50c..11ddf0a7723c 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_client.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_client.py @@ -16,13 +16,13 @@ from .._utils.serialization import Deserializer, Serializer from ._configuration import KeyVaultClientConfiguration -from ._operations import KeyVaultClientOperationsMixin +from ._operations import _KeyVaultClientOperationsMixin if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential -class KeyVaultClient(KeyVaultClientOperationsMixin): +class KeyVaultClient(_KeyVaultClientOperationsMixin): """The key vault client performs cryptographic key operations and vault operations against the Key Vault service. 
@@ -30,8 +30,9 @@ class KeyVaultClient(KeyVaultClientOperationsMixin): :type vault_base_url: str :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :keyword api_version: The API version to use for this operation. Default value is "7.6". Note - that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-07-01". + Default value is "2025-07-01". Note that overriding this default value may result in + unsupported behavior. :paramtype api_version: str """ diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_configuration.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_configuration.py index 7960dc38a2e8..68ac794d5103 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_configuration.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_configuration.py @@ -26,13 +26,14 @@ class KeyVaultClientConfiguration: # pylint: disable=too-many-instance-attribut :type vault_base_url: str :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :keyword api_version: The API version to use for this operation. Default value is "7.6". Note - that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-07-01". + Default value is "2025-07-01". Note that overriding this default value may result in + unsupported behavior. 
:paramtype api_version: str """ def __init__(self, vault_base_url: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: - api_version: str = kwargs.pop("api_version", "7.6") + api_version: str = kwargs.pop("api_version", "2025-07-01") if vault_base_url is None: raise ValueError("Parameter 'vault_base_url' must not be None.") diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/__init__.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/__init__.py index d514f5e4b5be..79e1a2ccf3da 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/__init__.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/__init__.py @@ -12,14 +12,12 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._operations import KeyVaultClientOperationsMixin # type: ignore +from ._operations import _KeyVaultClientOperationsMixin # type: ignore # pylint: disable=unused-import from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk -__all__ = [ - "KeyVaultClientOperationsMixin", -] +__all__ = [] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/_operations.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/_operations.py index 33877dd1143c..88a1892f9ef2 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/_operations.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/_operations.py @@ -9,7 +9,7 @@ from collections.abc import MutableMapping from io import IOBase import json -from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TypeVar, Union, overload +from typing import Any, Callable, IO, 
Optional, TypeVar, Union, overload import urllib.parse from azure.core import AsyncPipelineClient @@ -65,10 +65,10 @@ JSON = MutableMapping[str, Any] T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -class KeyVaultClientOperationsMixin( # pylint: disable=too-many-public-methods +class _KeyVaultClientOperationsMixin( # pylint: disable=too-many-public-methods ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], KeyVaultClientConfiguration] ): @@ -211,6 +211,7 @@ async def create_key( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -225,11 +226,14 @@ async def create_key( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) @@ -279,6 +283,7 @@ async def rotate_key(self, key_name: str, **kwargs: Any) -> _models.KeyBundle: } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, 
**kwargs @@ -293,11 +298,14 @@ async def rotate_key(self, key_name: str, **kwargs: Any) -> _models.KeyBundle: except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) @@ -445,6 +453,7 @@ async def import_key( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -459,11 +468,14 @@ async def import_key( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) @@ -513,6 +525,7 @@ async def delete_key(self, key_name: str, **kwargs: Any) -> _models.DeletedKeyBu } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, 
stream=_stream, **kwargs @@ -527,11 +540,14 @@ async def delete_key(self, key_name: str, **kwargs: Any) -> _models.DeletedKeyBu except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.DeletedKeyBundle, response.json()) @@ -697,6 +713,7 @@ async def update_key( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -711,11 +728,14 @@ async def update_key( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) @@ -768,6 +788,7 @@ async def get_key(self, key_name: str, key_version: str, **kwargs: Any) -> _mode } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: 
disable=protected-access _request, stream=_stream, **kwargs @@ -782,11 +803,14 @@ async def get_key(self, key_name: str, key_version: str, **kwargs: Any) -> _mode except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) @@ -798,7 +822,7 @@ async def get_key(self, key_name: str, key_version: str, **kwargs: Any) -> _mode @distributed_trace def get_key_versions( self, key_name: str, *, maxresults: Optional[int] = None, **kwargs: Any - ) -> AsyncIterable["_models.KeyItem"]: + ) -> AsyncItemPaged["_models.KeyItem"]: """Retrieves a list of individual key versions with the same key name. The full key identifier, attributes, and tags are provided in the response. 
This operation @@ -816,7 +840,7 @@ def get_key_versions( _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.KeyItem]] = kwargs.pop("cls", None) + cls: ClsType[list[_models.KeyItem]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -867,7 +891,10 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.KeyItem], deserialized.get("value", [])) + list_of_elem = _deserialize( + list[_models.KeyItem], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -883,7 +910,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) return pipeline_response @@ -891,7 +921,7 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace - def get_keys(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> AsyncIterable["_models.KeyItem"]: + def get_keys(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> AsyncItemPaged["_models.KeyItem"]: """List keys in the specified vault. 
Retrieves a list of the keys in the Key Vault as JSON Web Key structures that contain the @@ -909,7 +939,7 @@ def get_keys(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> AsyncI _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.KeyItem]] = kwargs.pop("cls", None) + cls: ClsType[list[_models.KeyItem]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -959,7 +989,10 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.KeyItem], deserialized.get("value", [])) + list_of_elem = _deserialize( + list[_models.KeyItem], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -975,7 +1008,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) return pipeline_response @@ -1030,6 +1066,7 @@ async def backup_key(self, key_name: str, **kwargs: Any) -> _models.BackupKeyRes } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1044,11 +1081,14 @@ async def backup_key(self, key_name: str, **kwargs: Any) -> _models.BackupKeyRes except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - 
error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.BackupKeyResult, response.json()) @@ -1198,6 +1238,7 @@ async def restore_key( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1212,11 +1253,14 @@ async def restore_key( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) @@ -1398,6 +1442,7 @@ async def encrypt( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1412,11 +1457,14 @@ async def encrypt( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = 
_failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyOperationResult, response.json()) @@ -1606,6 +1654,7 @@ async def decrypt( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1620,11 +1669,14 @@ async def decrypt( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyOperationResult, response.json()) @@ -1785,6 +1837,7 @@ async def sign( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1799,11 +1852,14 @@ async def sign( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise 
HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyOperationResult, response.json()) @@ -1973,6 +2029,7 @@ async def verify( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1987,11 +2044,14 @@ async def verify( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyVerifyResult, response.json()) @@ -2165,6 +2225,7 @@ async def wrap_key( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2179,11 +2240,14 @@ async def wrap_key( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = 
response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyOperationResult, response.json()) @@ -2349,6 +2413,7 @@ async def unwrap_key( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2363,11 +2428,14 @@ async def unwrap_key( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyOperationResult, response.json()) @@ -2529,6 +2597,7 @@ async def release( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2543,11 +2612,14 @@ async def release( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else 
response.iter_raw() else: deserialized = _deserialize(_models.KeyReleaseResult, response.json()) @@ -2559,7 +2631,7 @@ async def release( @distributed_trace def get_deleted_keys( self, *, maxresults: Optional[int] = None, **kwargs: Any - ) -> AsyncIterable["_models.DeletedKeyItem"]: + ) -> AsyncItemPaged["_models.DeletedKeyItem"]: """Lists the deleted keys in the specified vault. Retrieves a list of the keys in the Key Vault as JSON Web Key structures that contain the @@ -2579,7 +2651,7 @@ def get_deleted_keys( _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.DeletedKeyItem]] = kwargs.pop("cls", None) + cls: ClsType[list[_models.DeletedKeyItem]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -2629,7 +2701,10 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.DeletedKeyItem], deserialized.get("value", [])) + list_of_elem = _deserialize( + list[_models.DeletedKeyItem], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -2645,7 +2720,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) return pipeline_response @@ -2692,6 +2770,7 @@ async def get_deleted_key(self, key_name: str, **kwargs: Any) -> _models.Deleted } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2706,11 +2785,14 @@ async def get_deleted_key(self, key_name: str, **kwargs: Any) -> _models.Deleted except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.DeletedKeyBundle, response.json()) @@ -2768,7 +2850,10 @@ async def purge_deleted_key(self, key_name: str, **kwargs: Any) -> None: if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if cls: @@ -2815,6 +2900,7 @@ async def recover_deleted_key(self, key_name: str, **kwargs: Any) -> _models.Key } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2829,11 +2915,14 @@ async def recover_deleted_key(self, key_name: str, **kwargs: Any) -> _models.Key except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + 
response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) @@ -2881,6 +2970,7 @@ async def get_key_rotation_policy(self, key_name: str, **kwargs: Any) -> _models } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2895,11 +2985,14 @@ async def get_key_rotation_policy(self, key_name: str, **kwargs: Any) -> _models except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyRotationPolicy, response.json()) @@ -3031,6 +3124,7 @@ async def update_key_rotation_policy( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3045,11 +3139,14 @@ async def update_key_rotation_policy( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, 
response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyRotationPolicy, response.json()) @@ -3163,6 +3260,7 @@ async def get_random_bytes( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3177,11 +3275,14 @@ async def get_random_bytes( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.RandomBytes, response.json()) @@ -3192,8 +3293,9 @@ async def get_random_bytes( @distributed_trace_async @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "key_name", "key_version", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["key_name", "key_version", "accept", "api_version"]}, + api_versions_list=["7.6-preview.2", "7.6", "2025-06-01-preview", "2025-07-01"], ) async def get_key_attestation(self, key_name: str, key_version: str, **kwargs: Any) -> _models.KeyBundle: """Gets the public part of a stored key along with its attestation blob. 
@@ -3238,6 +3340,7 @@ async def get_key_attestation(self, key_name: str, key_version: str, **kwargs: A } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3252,11 +3355,14 @@ async def get_key_attestation(self, key_name: str, key_version: str, **kwargs: A except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.KeyBundle, response.json()) diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/models/_enums.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/models/_enums.py index bbd39559553f..e6ef90414b95 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/models/_enums.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/models/_enums.py @@ -20,22 +20,22 @@ class DeletionRecoveryLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Denotes a vault state in which deletion is an irreversible operation, without the possibility for recovery. 
This level corresponds to no protection being available against a Delete operation; the data is irretrievably lost upon accepting a Delete operation at the entity level - or higher (vault, resource group, subscription etc.)""" + or higher (vault, resource group, subscription etc.).""" RECOVERABLE_PURGEABLE = "Recoverable+Purgeable" """Denotes a vault state in which deletion is recoverable, and which also permits immediate and permanent deletion (i.e. purge). This level guarantees the recoverability of the deleted entity during the retention interval (90 days), unless a Purge operation is requested, or the - subscription is cancelled. System wil permanently delete it after 90 days, if not recovered""" + subscription is cancelled. System wil permanently delete it after 90 days, if not recovered.""" RECOVERABLE = "Recoverable" """Denotes a vault state in which deletion is recoverable without the possibility for immediate and permanent deletion (i.e. purge). This level guarantees the recoverability of the deleted entity during the retention interval(90 days) and while the subscription is still available. - System wil permanently delete it after 90 days, if not recovered""" + System wil permanently delete it after 90 days, if not recovered.""" RECOVERABLE_PROTECTED_SUBSCRIPTION = "Recoverable+ProtectedSubscription" """Denotes a vault and subscription state in which deletion is recoverable within retention interval (90 days), immediate and permanent deletion (i.e. purge) is not permitted, and in which the subscription itself cannot be permanently canceled. System wil permanently delete it - after 90 days, if not recovered""" + after 90 days, if not recovered.""" CUSTOMIZED_RECOVERABLE_PURGEABLE = "CustomizedRecoverable+Purgeable" """Denotes a vault state in which deletion is recoverable, and which also permits immediate and permanent deletion (i.e. purge when 7 <= SoftDeleteRetentionInDays < 90). 
This level guarantees @@ -145,45 +145,45 @@ class JsonWebKeySignatureAlgorithm(str, Enum, metaclass=CaseInsensitiveEnumMeta) PS256 = "PS256" """RSASSA-PSS using SHA-256 and MGF1 with SHA-256, as described in - `https://tools.ietf.org/html/rfc7518 `_""" + `https://tools.ietf.org/html/rfc7518 `_.""" PS384 = "PS384" """RSASSA-PSS using SHA-384 and MGF1 with SHA-384, as described in - `https://tools.ietf.org/html/rfc7518 `_""" + `https://tools.ietf.org/html/rfc7518 `_.""" PS512 = "PS512" """RSASSA-PSS using SHA-512 and MGF1 with SHA-512, as described in - `https://tools.ietf.org/html/rfc7518 `_""" + `https://tools.ietf.org/html/rfc7518 `_.""" RS256 = "RS256" """RSASSA-PKCS1-v1_5 using SHA-256, as described in `https://tools.ietf.org/html/rfc7518 - `_""" + `_.""" RS384 = "RS384" """RSASSA-PKCS1-v1_5 using SHA-384, as described in `https://tools.ietf.org/html/rfc7518 - `_""" + `_.""" RS512 = "RS512" """RSASSA-PKCS1-v1_5 using SHA-512, as described in `https://tools.ietf.org/html/rfc7518 - `_""" + `_.""" HS256 = "HS256" """HMAC using SHA-256, as described in `https://tools.ietf.org/html/rfc7518 - `_""" + `_.""" HS384 = "HS384" """HMAC using SHA-384, as described in `https://tools.ietf.org/html/rfc7518 - `_""" + `_.""" HS512 = "HS512" """HMAC using SHA-512, as described in `https://tools.ietf.org/html/rfc7518 - `_""" + `_.""" RSNULL = "RSNULL" - """Reserved""" + """Reserved.""" ES256 = "ES256" """ECDSA using P-256 and SHA-256, as described in `https://tools.ietf.org/html/rfc7518 `_.""" ES384 = "ES384" """ECDSA using P-384 and SHA-384, as described in `https://tools.ietf.org/html/rfc7518 - `_""" + `_.""" ES512 = "ES512" """ECDSA using P-521 and SHA-512, as described in `https://tools.ietf.org/html/rfc7518 - `_""" + `_.""" ES256_K = "ES256K" """ECDSA using P-256K and SHA-256, as described in `https://tools.ietf.org/html/rfc7518 - `_""" + `_.""" class JsonWebKeyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -197,11 +197,11 @@ class JsonWebKeyType(str, Enum, 
metaclass=CaseInsensitiveEnumMeta): EC_HSM = "EC-HSM" """Elliptic Curve with a private key which is stored in the HSM.""" RSA = "RSA" - """RSA (`https://tools.ietf.org/html/rfc3447 `_)""" + """RSA (`https://tools.ietf.org/html/rfc3447 `_).""" RSA_HSM = "RSA-HSM" """RSA with a private key which is stored in the HSM.""" OCT = "oct" - """Octet sequence (used to represent symmetric keys)""" + """Octet sequence (used to represent symmetric keys).""" OCT_HSM = "oct-HSM" """Octet sequence (used to represent symmetric keys) which is stored the HSM.""" diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/models/_models.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/models/_models.py index 030ea63feed8..0517ee3a6f6d 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/models/_models.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/models/_models.py @@ -9,7 +9,7 @@ # pylint: disable=useless-super-delegation import datetime -from typing import Any, Dict, List, Mapping, Optional, TYPE_CHECKING, Union, overload +from typing import Any, Mapping, Optional, TYPE_CHECKING, Union, overload from .._utils.model_base import Model as _Model, rest_field @@ -57,7 +57,7 @@ class DeletedKeyBundle(_Model): visibility=["read", "create", "update", "delete", "query"] ) """The key management attributes.""" - tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Application specific metadata in the form of key-value pairs.""" managed: Optional[bool] = rest_field(visibility=["read"]) """True if the key's lifetime is managed by key vault. 
If this is a key backing a certificate, @@ -85,7 +85,7 @@ def __init__( *, key: Optional["_models.JsonWebKey"] = None, attributes: Optional["_models.KeyAttributes"] = None, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, release_policy: Optional["_models.KeyReleasePolicy"] = None, recovery_id: Optional[str] = None, ) -> None: ... @@ -128,7 +128,7 @@ class DeletedKeyItem(_Model): visibility=["read", "create", "update", "delete", "query"] ) """The key management attributes.""" - tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Application specific metadata in the form of key-value pairs.""" managed: Optional[bool] = rest_field(visibility=["read"]) """True if the key's lifetime is managed by key vault. If this is a key backing a certificate, @@ -152,7 +152,7 @@ def __init__( *, kid: Optional[str] = None, attributes: Optional["_models.KeyAttributes"] = None, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, recovery_id: Optional[str] = None, ) -> None: ... @@ -247,7 +247,7 @@ class JsonWebKey(_Model): `https://tools.ietf.org/html/draft-ietf-jose-json-web-algorithms-40 `_. Known values are: \"EC\", \"EC-HSM\", \"RSA\", \"RSA-HSM\", \"oct\", and \"oct-HSM\".""" - key_ops: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + key_ops: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Json web key operations. 
For more information on possible key operations, see JsonWebKeyOperation.""" n: Optional[bytes] = rest_field(visibility=["read", "create", "update", "delete", "query"], format="base64url") @@ -288,7 +288,7 @@ def __init__( *, kid: Optional[str] = None, kty: Optional[Union[str, "_models.JsonWebKeyType"]] = None, - key_ops: Optional[List[str]] = None, + key_ops: Optional[list[str]] = None, n: Optional[bytes] = None, e: Optional[bytes] = None, d: Optional[bytes] = None, @@ -478,7 +478,7 @@ class KeyBundle(_Model): visibility=["read", "create", "update", "delete", "query"] ) """The key management attributes.""" - tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Application specific metadata in the form of key-value pairs.""" managed: Optional[bool] = rest_field(visibility=["read"]) """True if the key's lifetime is managed by key vault. If this is a key backing a certificate, @@ -494,7 +494,7 @@ def __init__( *, key: Optional["_models.JsonWebKey"] = None, attributes: Optional["_models.KeyAttributes"] = None, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, release_policy: Optional["_models.KeyReleasePolicy"] = None, ) -> None: ... @@ -540,7 +540,7 @@ class KeyCreateParameters(_Model): """The key size in bits. For example: 2048, 3072, or 4096 for RSA.""" public_exponent: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The public exponent for a RSA key.""" - key_ops: Optional[List[Union[str, "_models.JsonWebKeyOperation"]]] = rest_field( + key_ops: Optional[list[Union[str, "_models.JsonWebKeyOperation"]]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) """Json web key operations. 
For more information on possible key operations, see @@ -549,7 +549,7 @@ class KeyCreateParameters(_Model): name="attributes", visibility=["read", "create", "update", "delete", "query"] ) """The attributes of a key managed by the key vault service.""" - tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Application specific metadata in the form of key-value pairs.""" curve: Optional[Union[str, "_models.JsonWebKeyCurveName"]] = rest_field( name="crv", visibility=["read", "create", "update", "delete", "query"] @@ -568,9 +568,9 @@ def __init__( kty: Union[str, "_models.JsonWebKeyType"], key_size: Optional[int] = None, public_exponent: Optional[int] = None, - key_ops: Optional[List[Union[str, "_models.JsonWebKeyOperation"]]] = None, + key_ops: Optional[list[Union[str, "_models.JsonWebKeyOperation"]]] = None, key_attributes: Optional["_models.KeyAttributes"] = None, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, curve: Optional[Union[str, "_models.JsonWebKeyCurveName"]] = None, release_policy: Optional["_models.KeyReleasePolicy"] = None, ) -> None: ... 
@@ -609,7 +609,7 @@ class KeyImportParameters(_Model): name="attributes", visibility=["read", "create", "update", "delete", "query"] ) """The key management attributes.""" - tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Application specific metadata in the form of key-value pairs.""" release_policy: Optional["_models.KeyReleasePolicy"] = rest_field( visibility=["read", "create", "update", "delete", "query"] @@ -623,7 +623,7 @@ def __init__( key: "_models.JsonWebKey", hsm: Optional[bool] = None, key_attributes: Optional["_models.KeyAttributes"] = None, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, release_policy: Optional["_models.KeyReleasePolicy"] = None, ) -> None: ... @@ -658,7 +658,7 @@ class KeyItem(_Model): visibility=["read", "create", "update", "delete", "query"] ) """The key management attributes.""" - tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Application specific metadata in the form of key-value pairs.""" managed: Optional[bool] = rest_field(visibility=["read"]) """True if the key's lifetime is managed by key vault. If this is a key backing a certificate, @@ -670,7 +670,7 @@ def __init__( *, kid: Optional[str] = None, attributes: Optional["_models.KeyAttributes"] = None, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, ) -> None: ... 
@overload @@ -921,7 +921,7 @@ class KeyRotationPolicy(_Model): id: Optional[str] = rest_field(visibility=["read"]) """The key policy id.""" - lifetime_actions: Optional[List["_models.LifetimeActions"]] = rest_field( + lifetime_actions: Optional[list["_models.LifetimeActions"]] = rest_field( name="lifetimeActions", visibility=["read", "create", "update", "delete", "query"] ) """Actions that will be performed by Key Vault over the lifetime of a key. For preview, @@ -936,7 +936,7 @@ class KeyRotationPolicy(_Model): def __init__( self, *, - lifetime_actions: Optional[List["_models.LifetimeActions"]] = None, + lifetime_actions: Optional[list["_models.LifetimeActions"]] = None, attributes: Optional["_models.KeyRotationPolicyAttributes"] = None, ) -> None: ... @@ -1048,7 +1048,7 @@ class KeyUpdateParameters(_Model): :vartype release_policy: ~azure.keyvault.keys._generated.models.KeyReleasePolicy """ - key_ops: Optional[List[Union[str, "_models.JsonWebKeyOperation"]]] = rest_field( + key_ops: Optional[list[Union[str, "_models.JsonWebKeyOperation"]]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) """Json web key operations. 
For more information on possible key operations, see @@ -1057,7 +1057,7 @@ class KeyUpdateParameters(_Model): name="attributes", visibility=["read", "create", "update", "delete", "query"] ) """The attributes of a key managed by the key vault service.""" - tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Application specific metadata in the form of key-value pairs.""" release_policy: Optional["_models.KeyReleasePolicy"] = rest_field( visibility=["read", "create", "update", "delete", "query"] @@ -1068,9 +1068,9 @@ class KeyUpdateParameters(_Model): def __init__( self, *, - key_ops: Optional[List[Union[str, "_models.JsonWebKeyOperation"]]] = None, + key_ops: Optional[list[Union[str, "_models.JsonWebKeyOperation"]]] = None, key_attributes: Optional["_models.KeyAttributes"] = None, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, release_policy: Optional["_models.KeyReleasePolicy"] = None, ) -> None: ... diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_models.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_models.py index 99d6409a2666..bff16a7210b9 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_models.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_models.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
@@ -312,7 +313,9 @@ def attestation(self) -> Optional[KeyAttestation]: # attestation was added in 7.6-preview.2 if self._attributes: attestation = getattr(self._attributes, "attestation", None) - return KeyAttestation._from_generated(attestation=attestation) if attestation else None # pylint:disable=protected-access + return ( + KeyAttestation._from_generated(attestation=attestation) if attestation else None + ) # pylint:disable=protected-access return None @@ -411,7 +414,8 @@ def _from_generated(cls, policy: "_models.KeyRotationPolicy") -> "KeyRotationPol [] if policy.lifetime_actions is None else [ - KeyRotationLifetimeAction._from_generated(action) for action in policy.lifetime_actions # pylint:disable=protected-access + KeyRotationLifetimeAction._from_generated(action) + for action in policy.lifetime_actions # pylint:disable=protected-access ] ) if policy.attributes: diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_shared/async_challenge_auth_policy.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_shared/async_challenge_auth_policy.py index 0f84607e3ccd..3e3ac1855178 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_shared/async_challenge_auth_policy.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_shared/async_challenge_auth_policy.py @@ -66,7 +66,6 @@ async def await_result(func: Callable[P, Union[T, Awaitable[T]]], *args: P.args, return result - class AsyncChallengeAuthPolicy(AsyncBearerTokenCredentialPolicy): """Policy for handling HTTP authentication challenges. 
@@ -83,9 +82,7 @@ def __init__(self, credential: AsyncTokenProvider, *scopes: str, **kwargs: Any) self._verify_challenge_resource = kwargs.pop("verify_challenge_resource", True) self._request_copy: Optional[HttpRequest] = None - async def send( - self, request: PipelineRequest[HttpRequest] - ) -> PipelineResponse[HttpRequest, AsyncHttpResponse]: + async def send(self, request: PipelineRequest[HttpRequest]) -> PipelineResponse[HttpRequest, AsyncHttpResponse]: """Authorize request with a bearer token and send it to the next policy. We implement this method to account for the valid scenario where a Key Vault authentication challenge is @@ -156,7 +153,6 @@ async def handle_challenge_flow( await await_result(self.on_response, request, response) return response - async def on_request(self, request: PipelineRequest) -> None: _enforce_tls(request) challenge = ChallengeCache.get_challenge_for_url(request.http_request.url) @@ -227,9 +223,7 @@ async def on_challenge(self, request: PipelineRequest, response: PipelineRespons if challenge.tenant_id and challenge.tenant_id.lower().endswith("adfs"): await self.authorize_request(request, scope, claims=challenge.claims) else: - await self.authorize_request( - request, scope, claims=challenge.claims, tenant_id=challenge.tenant_id - ) + await self.authorize_request(request, scope, claims=challenge.claims, tenant_id=challenge.tenant_id) return True diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/aio/_client.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/aio/_client.py index 2f1c30a96b01..41084614a416 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/aio/_client.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/aio/_client.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
@@ -84,12 +85,12 @@ def _get_attributes( return None def get_cryptography_client( - self, - key_name: str, - *, - key_version: Optional[str] = None, - **kwargs, # pylint: disable=unused-argument - ) -> CryptographyClient: + self, + key_name: str, + *, + key_version: Optional[str] = None, + **kwargs, # pylint: disable=unused-argument + ) -> CryptographyClient: """Gets a :class:`~azure.keyvault.keys.crypto.aio.CryptographyClient` for the given key. :param str key_name: The name of the key used to perform cryptographic operations. @@ -647,7 +648,7 @@ async def recover_deleted_key(self, name: str, **kwargs: Any) -> KeyVaultKey: command=command, final_resource=recovered_key, finished=False, - interval=polling_interval + interval=polling_interval, ) await polling_method.run() @@ -841,9 +842,7 @@ async def import_key( release_policy=policy, ) - bundle = await self._client.import_key( - name, parameters=parameters, **kwargs - ) + bundle = await self._client.import_key(name, parameters=parameters, **kwargs) return KeyVaultKey._from_key_bundle(bundle) @distributed_trace_async @@ -996,7 +995,7 @@ async def update_key_rotation_policy( # pylint: disable=unused-argument @distributed_trace_async async def get_key_attestation(self, name: str, version: Optional[str] = None, **kwargs: Any) -> KeyVaultKey: """Get a key and its attestation blob. - + This method is applicable to any key stored in Azure Key Vault Managed HSM. This operation requires the keys/get permission. 
diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_client.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_client.py index d3a27fee66df..51f273d8f858 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_client.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_client.py @@ -32,13 +32,13 @@ def _validate_arguments( - operation: KeyOperation, - algorithm: EncryptionAlgorithm, - *, - iv: Optional[bytes] = None, - tag: Optional[bytes] = None, - aad: Optional[bytes] = None, - ) -> None: + operation: KeyOperation, + algorithm: EncryptionAlgorithm, + *, + iv: Optional[bytes] = None, + tag: Optional[bytes] = None, + aad: Optional[bytes] = None, +) -> None: """Validates the arguments passed to perform an operation with a provided algorithm. :param KeyOperation operation: the type of operation being requested @@ -55,9 +55,7 @@ def _validate_arguments( """ if operation == KeyOperation.encrypt: if iv and "CBC" not in algorithm: - raise ValueError( - f"iv should only be provided with AES-CBC algorithms; {algorithm} does not accept an iv" - ) + raise ValueError(f"iv should only be provided with AES-CBC algorithms; {algorithm} does not accept an iv") if iv is None and "CBC" in algorithm: raise ValueError("iv is a required parameter for encryption with AES-CBC algorithms.") if aad and not ("CBC" in algorithm or "GCM" in algorithm): @@ -68,9 +66,7 @@ def _validate_arguments( if operation == KeyOperation.decrypt: if iv and not ("CBC" in algorithm or "GCM" in algorithm): - raise ValueError( - f"iv should only be provided with AES algorithms; {algorithm} does not accept an iv" - ) + raise ValueError(f"iv should only be provided with AES algorithms; {algorithm} does not accept an iv") if iv is None and ("CBC" in algorithm or "GCM" in algorithm): raise ValueError("iv is a required parameter for decryption with AES algorithms.") if tag and "GCM" not in algorithm: @@ -203,7 +199,7 @@ def _initialize(self, 
**kwargs: Any) -> None: key_bundle = self._client.get_key( self._key_id.name if self._key_id else None, self._key_id.version if self._key_id else None, - **kwargs + **kwargs, ) key = KeyVaultKey._from_key_bundle(key_bundle) self._key = key.key @@ -310,7 +306,7 @@ def encrypt( parameters=self._models.KeyOperationsParameters( algorithm=algorithm, value=plaintext, iv=iv, aad=additional_authenticated_data ), - **kwargs + **kwargs, ) result_iv = operation_result.iv if hasattr(operation_result, "iv") else None @@ -400,7 +396,7 @@ def decrypt( parameters=self._models.KeyOperationsParameters( algorithm=algorithm, value=ciphertext, iv=iv, tag=authentication_tag, aad=additional_authenticated_data ), - **kwargs + **kwargs, ) return DecryptResult(key_id=self.key_id, algorithm=algorithm, plaintext=operation_result.result) @@ -443,7 +439,7 @@ def wrap_key(self, algorithm: KeyWrapAlgorithm, key: bytes, **kwargs: Any) -> Wr key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeyOperationsParameters(algorithm=algorithm, value=key), - **kwargs + **kwargs, ) return WrapResult(key_id=self.key_id, algorithm=algorithm, encrypted_key=operation_result.result) @@ -485,7 +481,7 @@ def unwrap_key(self, algorithm: KeyWrapAlgorithm, encrypted_key: bytes, **kwargs key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeyOperationsParameters(algorithm=algorithm, value=encrypted_key), - **kwargs + **kwargs, ) return UnwrapResult(key_id=self.key_id, algorithm=algorithm, key=operation_result.result) @@ -527,7 +523,7 @@ def sign(self, algorithm: SignatureAlgorithm, digest: bytes, **kwargs: Any) -> S key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeySignParameters(algorithm=algorithm, value=digest), - **kwargs + **kwargs, ) return 
SignResult(key_id=self.key_id, algorithm=algorithm, signature=operation_result.result) @@ -571,7 +567,7 @@ def verify(self, algorithm: SignatureAlgorithm, digest: bytes, signature: bytes, key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeyVerifyParameters(algorithm=algorithm, digest=digest, signature=signature), - **kwargs + **kwargs, ) return VerifyResult(key_id=self.key_id, algorithm=algorithm, is_valid=operation_result.value) diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_enums.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_enums.py index fcce8d0929e7..356b72b5edc5 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_enums.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_enums.py @@ -5,6 +5,7 @@ from enum import Enum from azure.core import CaseInsensitiveEnumMeta + # pylint: disable=enum-must-be-uppercase class KeyWrapAlgorithm(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Key wrapping algorithms""" diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_internal/rsa_key.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_internal/rsa_key.py index e1325894bccc..0d3f21df4257 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_internal/rsa_key.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_internal/rsa_key.py @@ -33,7 +33,14 @@ class RsaKey(Key): # pylint:disable=too-many-public-methods _supported_encryption_algorithms = frozenset((Rsa1_5.name(), RsaOaep.name(), RsaOaep256.name())) _supported_key_wrap_algorithms = frozenset((Rsa1_5.name(), RsaOaep.name(), RsaOaep256.name())) _supported_signature_algorithms = frozenset( - (Ps256.name(), Ps384.name(), Ps512.name(), Rs256.name(), Rs384.name(), Rs512.name(),) + ( + Ps256.name(), + Ps384.name(), + Ps512.name(), + Rs256.name(), + Rs384.name(), + Rs512.name(), + ) ) 
def __init__(self, kid=None): diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py index cbd65e4da0e7..b39a8ca064c4 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/aio/_client.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/aio/_client.py index 9f7e370e983f..13111932ebbe 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/aio/_client.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/aio/_client.py @@ -146,7 +146,7 @@ async def _initialize(self, **kwargs: Any) -> None: key_bundle = await self._client.get_key( self._key_id.name if self._key_id else None, self._key_id.version if self._key_id else None, - **kwargs + **kwargs, ) key = KeyVaultKey._from_key_bundle(key_bundle) self._key = key.key @@ -229,7 +229,7 @@ async def encrypt( parameters=self._models.KeyOperationsParameters( algorithm=algorithm, value=plaintext, iv=iv, aad=additional_authenticated_data ), - **kwargs + **kwargs, ) result_iv = operation_result.iv if hasattr(operation_result, "iv") else None @@ -319,7 +319,7 @@ async def decrypt( parameters=self._models.KeyOperationsParameters( algorithm=algorithm, value=ciphertext, iv=iv, tag=authentication_tag, aad=additional_authenticated_data ), - **kwargs + **kwargs, ) return DecryptResult(key_id=self.key_id, algorithm=algorithm, plaintext=operation_result.result) @@ -362,7 +362,7 @@ async def wrap_key(self, algorithm: KeyWrapAlgorithm, key: bytes, **kwargs: Any) key_name=self._key_id.name if self._key_id else None, 
key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeyOperationsParameters(algorithm=algorithm, value=key), - **kwargs + **kwargs, ) return WrapResult(key_id=self.key_id, algorithm=algorithm, encrypted_key=operation_result.result) @@ -404,7 +404,7 @@ async def unwrap_key(self, algorithm: KeyWrapAlgorithm, encrypted_key: bytes, ** key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeyOperationsParameters(algorithm=algorithm, value=encrypted_key), - **kwargs + **kwargs, ) return UnwrapResult(key_id=self.key_id, algorithm=algorithm, key=operation_result.result) @@ -447,7 +447,7 @@ async def sign(self, algorithm: SignatureAlgorithm, digest: bytes, **kwargs: Any key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeySignParameters(algorithm=algorithm, value=digest), - **kwargs + **kwargs, ) return SignResult(key_id=self.key_id, algorithm=algorithm, signature=operation_result.result) @@ -493,7 +493,7 @@ async def verify( key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeyVerifyParameters(algorithm=algorithm, digest=digest, signature=signature), - **kwargs + **kwargs, ) return VerifyResult(key_id=self.key_id, algorithm=algorithm, is_valid=operation_result.value) diff --git a/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations.py b/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations.py index bdf7d1305fd3..2e3f83c2ff66 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
@@ -14,7 +15,7 @@ # 2. azure-keyvault-keys and azure-identity libraries (pip install these) # # 3. Set environment variable VAULT_URL with the URL of your key vault -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # diff --git a/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations_async.py b/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations_async.py index f6990ab87997..efef9cd89af4 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations_async.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -34,13 +35,14 @@ # 5. Restore a key (restore_key_backup) # ---------------------------------------------------------------------------------------------------------- + async def run_sample(): # Instantiate a key client that will be used to call the service. # Here we use the DefaultAzureCredential, but any azure-identity credential can be used. VAULT_URL = os.environ["VAULT_URL"] credential = DefaultAzureCredential() client = KeyClient(vault_url=VAULT_URL, credential=credential) - + # Let's create a Key of type RSA. # if the key already exists in the Key Vault, then a new version of the key is created. print("\n.. 
Create Key") diff --git a/sdk/keyvault/azure-keyvault-keys/samples/hello_world.py b/sdk/keyvault/azure-keyvault-keys/samples/hello_world.py index afb096383708..e323e555334f 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/hello_world.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/hello_world.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -12,7 +13,7 @@ # 2. azure-keyvault-keys and azure-identity libraries (pip install these) # # 3. Set environment variable VAULT_URL with the URL of your key vault -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # @@ -70,9 +71,7 @@ # associated with a key previously stored within Key Vault. print("\n.. Update a Key by name") expires = datetime.datetime.utcnow() + datetime.timedelta(days=365) -updated_ec_key = client.update_key_properties( - ec_key.name, ec_key.properties.version, expires_on=expires, enabled=False -) +updated_ec_key = client.update_key_properties(ec_key.name, ec_key.properties.version, expires_on=expires, enabled=False) print(f"Key with name '{updated_ec_key.name}' was updated on date '{updated_ec_key.properties.updated_on}'") print(f"Key with name '{updated_ec_key.name}' was updated to expire on '{updated_ec_key.properties.expires_on}'") diff --git a/sdk/keyvault/azure-keyvault-keys/samples/hello_world_async.py b/sdk/keyvault/azure-keyvault-keys/samples/hello_world_async.py index 864f14750aaa..87d3728d5253 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/hello_world_async.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/hello_world_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft 
Corporation. # Licensed under the MIT License. @@ -35,6 +36,7 @@ # 5. Delete a key (delete_key) # ---------------------------------------------------------------------------------------------------------- + async def run_sample(): # Instantiate a key client that will be used to call the service. # Here we use the DefaultAzureCredential, but any azure-identity credential can be used. diff --git a/sdk/keyvault/azure-keyvault-keys/samples/key_rotation.py b/sdk/keyvault/azure-keyvault-keys/samples/key_rotation.py index 248ac051899e..731f4d743805 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/key_rotation.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/key_rotation.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -14,7 +15,7 @@ # 2. azure-keyvault-keys and azure-identity libraries (pip install these) # # 3. Set environment variable VAULT_URL with the URL of your key vault -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # diff --git a/sdk/keyvault/azure-keyvault-keys/samples/key_rotation_async.py b/sdk/keyvault/azure-keyvault-keys/samples/key_rotation_async.py index c20f614943a7..4ebad1c8cf59 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/key_rotation_async.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/key_rotation_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -36,6 +37,7 @@ # 5. 
Delete a key (delete_key) # ---------------------------------------------------------------------------------------------------------- + async def run_sample(): # Instantiate a key client that will be used to call the service. # Here we use the DefaultAzureCredential, but any azure-identity credential can be used. @@ -108,4 +110,4 @@ async def run_sample(): if __name__ == "__main__": - asyncio.run(run_sample()) \ No newline at end of file + asyncio.run(run_sample()) diff --git a/sdk/keyvault/azure-keyvault-keys/samples/list_operations.py b/sdk/keyvault/azure-keyvault-keys/samples/list_operations.py index f8b01807efc4..cf42f7006bb2 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/list_operations.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/list_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -13,7 +14,7 @@ # 2. azure-keyvault-keys and azure-identity libraries (pip install these) # # 3. Set environment variable VAULT_URL with the URL of your key vault -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # diff --git a/sdk/keyvault/azure-keyvault-keys/samples/list_operations_async.py b/sdk/keyvault/azure-keyvault-keys/samples/list_operations_async.py index 757017e45e00..e1a2c535508e 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/list_operations_async.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/list_operations_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
@@ -35,6 +36,7 @@ # # ---------------------------------------------------------------------------------------------------------- + async def run_sample(): # Instantiate a key client that will be used to call the service. # Here we use the DefaultAzureCredential, but any azure-identity credential can be used. diff --git a/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations.py b/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations.py index 9b5b45985c97..7668cf456ac8 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -13,7 +14,7 @@ # 2. azure-keyvault-keys and azure-identity libraries (pip install these) # # 3. Set environment variable VAULT_URL with the URL of your key vault -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # diff --git a/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations_async.py b/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations_async.py index bcb8eb588df3..8d4f1c138c36 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations_async.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
@@ -7,6 +8,7 @@ from azure.identity.aio import DefaultAzureCredential from azure.keyvault.keys.aio import KeyClient + # ---------------------------------------------------------------------------------------------------------- # Prerequisites: # 1. An Azure Key Vault (https://learn.microsoft.com/azure/key-vault/quick-create-cli) diff --git a/sdk/keyvault/azure-keyvault-keys/samples/send_request.py b/sdk/keyvault/azure-keyvault-keys/samples/send_request.py index 78d35dc3c8f2..c5e3f2b5a9c3 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/send_request.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/send_request.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -14,7 +15,7 @@ # 2. azure-keyvault-keys and azure-identity libraries (pip install these) # # 3. Set environment variable VAULT_URL with the URL of your key vault -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # @@ -55,7 +56,7 @@ response = client.send_request(request) # The return value is an azure.core.rest.HttpResponse -- the key information is in the response body. -# We can get a dictionary of the body content with the `json` method. +# We can get a dictionary of the body content with the `json` method. response_body = response.json() print(f"\n.. 
Key with ID {response_body['key']['kid']} was found.") diff --git a/sdk/keyvault/azure-keyvault-keys/tests/_keys_test_case.py b/sdk/keyvault/azure-keyvault-keys/tests/_keys_test_case.py index d1e99ba811e3..d23ab5e15853 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/_keys_test_case.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/_keys_test_case.py @@ -13,7 +13,7 @@ def _get_attestation_uri(self): playback_uri = "https://fakeattestation.azurewebsites.net" if self.is_live: real_uri = os.environ.get("AZURE_KEYVAULT_ATTESTATION_URL") - real_uri = real_uri.rstrip('/') + real_uri = real_uri.rstrip("/") if real_uri is None: pytest.skip("No AZURE_KEYVAULT_ATTESTATION_URL environment variable") return real_uri @@ -22,9 +22,11 @@ def _get_attestation_uri(self): def create_crypto_client(self, key, **kwargs): if kwargs.pop("is_async", False): from azure.keyvault.keys.crypto.aio import CryptographyClient - credential = self.get_credential(CryptographyClient,is_async=True) + + credential = self.get_credential(CryptographyClient, is_async=True) else: from azure.keyvault.keys.crypto import CryptographyClient + credential = self.get_credential(CryptographyClient) return self.create_client_from_credential(CryptographyClient, credential=credential, key=key, **kwargs) diff --git a/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case.py b/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case.py index a67376fd53e1..87a1198ea5de 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case.py @@ -8,8 +8,6 @@ from devtools_testutils import AzureRecordedTestCase - - class KeyVaultTestCase(AzureRecordedTestCase): def get_resource_name(self, name): """helper to create resources with a consistent, test-indicative prefix""" diff --git a/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case_async.py b/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case_async.py index 6059c528f1a3..fb26f89f3ab3 100644 
--- a/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case_async.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case_async.py @@ -36,7 +36,7 @@ async def _poll_until_exception(self, fn, expected_exception, max_retries=20, re except expected_exception: return self.fail("expected exception {expected_exception} was not raised") - + def teardown_method(self, method): HttpChallengeCache.clear() assert len(HttpChallengeCache._cache) == 0 diff --git a/sdk/keyvault/azure-keyvault-keys/tests/conftest.py b/sdk/keyvault/azure-keyvault-keys/tests/conftest.py index 32b2c439318a..8e6ecf471467 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/conftest.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/conftest.py @@ -13,16 +13,22 @@ add_oauth_response_sanitizer, is_live, remove_batch_sanitizers, - set_custom_default_matcher + set_custom_default_matcher, ) from azure.keyvault.keys._shared.client_base import DEFAULT_VERSION, ApiVersion -os.environ['PYTHONHASHSEED'] = '0' +os.environ["PYTHONHASHSEED"] = "0" ALL_API_VERSIONS = "--all-api-versions" + def pytest_addoption(parser): - parser.addoption(ALL_API_VERSIONS, action="store_true", default=False, - help="Test all api version in live mode. Not applicable in playback mode.") + parser.addoption( + ALL_API_VERSIONS, + action="store_true", + default=False, + help="Test all api version in live mode. 
Not applicable in playback mode.", + ) + def pytest_configure(config): if is_live() and not config.getoption(ALL_API_VERSIONS): @@ -30,16 +36,19 @@ def pytest_configure(config): else: pytest.api_version = ApiVersion + @pytest.fixture(scope="session", autouse=True) def add_sanitizers(test_proxy): azure_keyvault_url = os.getenv("AZURE_KEYVAULT_URL", "https://vaultname.vault.azure.net") azure_keyvault_url = azure_keyvault_url.rstrip("/") keyvault_tenant_id = os.getenv("KEYVAULT_TENANT_ID", "keyvault_tenant_id") keyvault_subscription_id = os.getenv("KEYVAULT_SUBSCRIPTION_ID", "keyvault_subscription_id") - azure_managedhsm_url = os.environ.get("AZURE_MANAGEDHSM_URL","https://managedhsmvaultname.managedhsm.azure.net") + azure_managedhsm_url = os.environ.get("AZURE_MANAGEDHSM_URL", "https://managedhsmvaultname.managedhsm.azure.net") azure_managedhsm_url = azure_managedhsm_url.rstrip("/") - azure_attestation_uri = os.environ.get("AZURE_KEYVAULT_ATTESTATION_URL","https://fakeattestation.azurewebsites.net") - azure_attestation_uri = azure_attestation_uri.rstrip('/') + azure_attestation_uri = os.environ.get( + "AZURE_KEYVAULT_ATTESTATION_URL", "https://fakeattestation.azurewebsites.net" + ) + azure_attestation_uri = azure_attestation_uri.rstrip("/") add_general_string_sanitizer(target=azure_keyvault_url, value="https://vaultname.vault.azure.net") add_general_string_sanitizer(target=keyvault_tenant_id, value="00000000-0000-0000-0000-000000000000") @@ -52,7 +61,12 @@ def add_sanitizers(test_proxy): # Remove the following sanitizers since certain fields are needed in tests and are non-sensitive: # - AZSDK3430: $..id # - AZSDK3447: $.key - remove_batch_sanitizers(["AZSDK3430", "AZSDK3447",]) + remove_batch_sanitizers( + [ + "AZSDK3430", + "AZSDK3447", + ] + ) @pytest.fixture(scope="session", autouse=True) @@ -80,6 +94,7 @@ def immediate_return(_): else: yield + @pytest.fixture(scope="session") def event_loop(request): loop = asyncio.new_event_loop() diff --git 
a/sdk/keyvault/azure-keyvault-keys/tests/keys.py b/sdk/keyvault/azure-keyvault-keys/tests/keys.py index 80db438ae5f2..428b82eb21e0 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/keys.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/keys.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. diff --git a/sdk/keyvault/azure-keyvault-keys/tests/perfstress_tests/sign.py b/sdk/keyvault/azure-keyvault-keys/tests/perfstress_tests/sign.py index e98cbd1ce11c..469de42d8a35 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/perfstress_tests/sign.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/perfstress_tests/sign.py @@ -25,6 +25,7 @@ def __init__(self, arguments): super().__init__(arguments) from dotenv import load_dotenv + load_dotenv() # Auth configuration diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth.py b/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth.py index 35d27d63b297..5c32754a86ee 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth.py @@ -36,6 +36,7 @@ TOKEN_TYPES = [AccessToken, AccessTokenInfo] + class TestChallengeAuth(KeyVaultTestCase, KeysTestCase): @pytest.mark.parametrize("api_version,is_hsm", only_default_version) @KeysClientPreparer() @@ -125,7 +126,6 @@ def test_enforces_tls(): pipeline.run(HttpRequest("GET", url)) - def test_challenge_cache(): url_a = get_random_url() challenge_a = HttpChallenge(url_a, "Bearer authorization=authority A, resource=resource A") @@ -148,9 +148,7 @@ def test_challenge_parsing(): tenant = "tenant" authority = f"https://login.authority.net/{tenant}" resource = "https://challenge.resource" - challenge = HttpChallenge( - "https://request.uri", challenge=f"Bearer authorization={authority}, resource={resource}" - ) + challenge = HttpChallenge("https://request.uri", 
challenge=f"Bearer authorization={authority}, resource={resource}") assert challenge.get_authorization_server() == authority assert challenge.get_resource() == resource @@ -580,8 +578,8 @@ def get_token(*_, **__): mock_response( status_code=401, headers={"WWW-Authenticate": f'Bearer authorization="{url}", resource={resource}'} ), - mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}) - ] + mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}), + ], ) transport_2 = validating_transport( requests=[Request(), Request(required_headers={"Authorization": f"Bearer {token}"})], @@ -589,8 +587,8 @@ def get_token(*_, **__): mock_response( status_code=401, headers={"WWW-Authenticate": f'Bearer authorization="{url}", resource={resource}'} ), - mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}) - ] + mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}), + ], ) client = KeyClient(url, credential, transport=transport, verify_challenge_resource=verify_challenge_resource) @@ -635,8 +633,8 @@ def get_token(*_, **__): mock_response( status_code=401, headers={"WWW-Authenticate": f'Bearer authorization="{url}", resource={resource}'} ), - mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}) - ] + mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}), + ], ) client = KeyClient(url, credential, transport=transport, verify_challenge_resource=verify_challenge_resource) diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth_async.py b/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth_async.py index bfec049a17c8..76ccf739df16 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth_async.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth_async.py @@ -19,7 +19,7 @@ from azure.core.pipeline import AsyncPipeline from azure.core.pipeline.policies import SansIOHTTPPolicy from 
azure.core.rest import HttpRequest -from azure.keyvault.keys._shared import AsyncChallengeAuthPolicy,HttpChallenge, HttpChallengeCache +from azure.keyvault.keys._shared import AsyncChallengeAuthPolicy, HttpChallenge, HttpChallengeCache from azure.keyvault.keys._shared.client_base import DEFAULT_VERSION from azure.keyvault.keys.aio import KeyClient from devtools_testutils.aio import recorded_by_proxy_async @@ -46,7 +46,7 @@ class TestChallengeAuth(KeyVaultTestCase): @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_default_version) + @pytest.mark.parametrize("api_version,is_hsm", only_default_version) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_multitenant_authentication(self, client, is_hsm, **kwargs): @@ -132,9 +132,7 @@ async def get_token(*scopes, **_): credential = Mock(spec_set=["get_token"], get_token=Mock(wraps=get_token)) else: credential = Mock(spec_set=["get_token_info"], get_token_info=Mock(wraps=get_token)) - pipeline = AsyncPipeline( - policies=[AsyncChallengeAuthPolicy(credential=credential)], transport=Mock(send=send) - ) + pipeline = AsyncPipeline(policies=[AsyncChallengeAuthPolicy(credential=credential)], transport=Mock(send=send)) request = HttpRequest("POST", get_random_url()) request.set_bytes_body(expected_content) await pipeline.run(request) @@ -201,9 +199,7 @@ async def get_token(*_, options=None, **kwargs): credential = Mock(spec_set=["get_token"], get_token=Mock(wraps=get_token)) else: credential = Mock(spec_set=["get_token_info"], get_token_info=Mock(wraps=get_token)) - pipeline = AsyncPipeline( - policies=[AsyncChallengeAuthPolicy(credential=credential)], transport=Mock(send=send) - ) + pipeline = AsyncPipeline(policies=[AsyncChallengeAuthPolicy(credential=credential)], transport=Mock(send=send)) request = HttpRequest("POST", get_random_url()) request.set_bytes_body(expected_content) await pipeline.run(request) @@ -496,8 +492,8 @@ async def get_token(*_, **__): mock_response( status_code=401, 
headers={"WWW-Authenticate": f'Bearer authorization="{url}", resource={resource}'} ), - mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}) - ] + mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}), + ], ) transport_2 = async_validating_transport( requests=[Request(), Request(required_headers={"Authorization": f"Bearer {token}"})], @@ -505,8 +501,8 @@ async def get_token(*_, **__): mock_response( status_code=401, headers={"WWW-Authenticate": f'Bearer authorization="{url}", resource={resource}'} ), - mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}) - ] + mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}), + ], ) client = KeyClient(url, credential, transport=transport, verify_challenge_resource=verify_challenge_resource) @@ -551,8 +547,8 @@ async def get_token(*_, **__): mock_response( status_code=401, headers={"WWW-Authenticate": f'Bearer authorization="{url}", resource={resource}'} ), - mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}) - ] + mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}), + ], ) client = KeyClient(url, credential, transport=transport, verify_challenge_resource=verify_challenge_resource) diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client.py b/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client.py index 45cb23d5b84b..382bee35dfb7 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
@@ -20,7 +21,7 @@ rsa_crt_dmq1, rsa_crt_iqmp, RSAPrivateNumbers, - RSAPublicNumbers + RSAPublicNumbers, ) from cryptography.hazmat.primitives.serialization import Encoding, NoEncryption, PrivateFormat, PublicFormat import pytest @@ -60,21 +61,21 @@ def _to_bytes(hex): # RSA key with private components so that the JWK can be used for private operations TEST_JWK = { - "kty":"RSA", - "key_ops":["decrypt", "verify", "unwrapKey"], - "n":_to_bytes( + "kty": "RSA", + "key_ops": ["decrypt", "verify", "unwrapKey"], + "n": _to_bytes( "00a0914d00234ac683b21b4c15d5bed887bdc959c2e57af54ae734e8f00720d775d275e455207e3784ceeb60a50a4655dd72a7a94d271e8ee8f7959a669ca6e775bf0e23badae991b4529d978528b4bd90521d32dd2656796ba82b6bbfc7668c8f5eeb5053747fd199319d29a8440d08f4412d527ff9311eda71825920b47b1c46b11ab3e91d7316407e89c7f340f7b85a34042ce51743b27d4718403d34c7b438af6181be05e4d11eb985d38253d7fe9bf53fc2f1b002d22d2d793fa79a504b6ab42d0492804d7071d727a06cf3a8893aa542b1503f832b296371b6707d4dc6e372f8fe67d8ded1c908fde45ce03bc086a71487fa75e43aa0e0679aa0d20efe35" ), - "e":_to_bytes("10001"), - "p":_to_bytes( + "e": _to_bytes("10001"), + "p": _to_bytes( "00d1deac8d68ddd2c1fd52d5999655b2cf1565260de5269e43fd2a85f39280e1708ffff0682166cb6106ee5ea5e9ffd9f98d0becc9ff2cda2febc97259215ad84b9051e563e14a051dce438bc6541a24ac4f014cf9732d36ebfc1e61a00d82cbe412090f7793cfbd4b7605be133dfc3991f7e1bed5786f337de5036fc1e2df4cf3" ), - "q":_to_bytes( + "q": _to_bytes( "00c3dc66b641a9b73cd833bc439cd34fc6574465ab5b7e8a92d32595a224d56d911e74624225b48c15a670282a51c40d1dad4bc2e9a3c8dab0c76f10052dfb053bc6ed42c65288a8e8bace7a8881184323f94d7db17ea6dfba651218f931a93b8f738f3d8fd3f6ba218d35b96861a0f584b0ab88ddcf446b9815f4d287d83a3237" ), - "d":_to_bytes( + "d": _to_bytes( 
"627c7d24668148fe2252c7fa649ea8a5a9ed44d75c766cda42b29b660e99404f0e862d4561a6c95af6a83d213e0a2244b03cd28576473215073785fb067f015da19084ade9f475e08b040a9a2c7ba00253bb8125508c9df140b75161d266be347a5e0f6900fe1d8bbf78ccc25eeb37e0c9d188d6e1fc15169ba4fe12276193d77790d2326928bd60d0d01d6ead8d6ac4861abadceec95358fd6689c50a1671a4a936d2376440a41445501da4e74bfb98f823bd19c45b94eb01d98fc0d2f284507f018ebd929b8180dbe6381fdd434bffb7800aaabdd973d55f9eaf9bb88a6ea7b28c2a80231e72de1ad244826d665582c2362761019de2e9f10cb8bcc2625649" - ) + ), } @@ -111,9 +112,10 @@ def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops assert key.kty == kty, f"kty should by '{key}', but is '{key.kty}'" assert key.n and key.e, "Bad RSA public material." assert sorted(key_ops) == sorted(key.key_ops), f"keyOps should be '{key_ops}', but is '{key.key_ops}'" - - assert key_attributes.properties.created_on and key_attributes.properties.updated_on, "Missing required date attributes." - + + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kty): prefix = "/".join(s.strip("/") for s in [vault, "keys", key_name]) @@ -122,7 +124,9 @@ def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kt assert key_curve == key.crv assert kid.index(prefix) == 0, f"Key Id should start with '{prefix}', but value is '{kid}'" assert key.kty == kty, f"kty should by '{key}', but is '{key.kty}'" - assert key_attributes.properties.created_on and key_attributes.properties.updated_on,"Missing required date attributes." + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." 
def _import_test_key(self, client, name, hardware_protected=False): key = JsonWebKey( @@ -169,7 +173,7 @@ def _import_symmetric_test_key(self, client, name): assert key_vault_key.key.kid == imported_key.id == key_vault_key.id return key_vault_key - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_ec_key_id(self, key_client, is_hsm, **kwargs): @@ -185,7 +189,7 @@ def test_ec_key_id(self, key_client, is_hsm, **kwargs): crypto_client.verify(SignatureAlgorithm.es256_k, hashlib.sha256(self.plaintext).digest(), self.plaintext) - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_rsa_key_id(self, key_client, is_hsm, **kwargs): @@ -399,7 +403,7 @@ def test_symmetric_encrypt_and_decrypt(self, key_client, **kwargs): encrypt_result.ciphertext, iv=encrypt_result.iv, authentication_tag=encrypt_result.tag, - additional_authenticated_data=self.aad + additional_authenticated_data=self.aad, ) else: encrypt_result = crypto_client.encrypt( @@ -410,13 +414,15 @@ def test_symmetric_encrypt_and_decrypt(self, key_client, **kwargs): encrypt_result.algorithm, encrypt_result.ciphertext, iv=encrypt_result.iv, - additional_authenticated_data=None if "CBC" in algorithm else self.aad + additional_authenticated_data=None if "CBC" in algorithm else self.aad, ) assert decrypt_result.key_id == imported_key.id assert decrypt_result.algorithm == algorithm if algorithm.endswith("CBC"): - assert decrypt_result.plaintext.startswith(self.plaintext) # AES-CBC returns a zero-padded plaintext + assert decrypt_result.plaintext.startswith( + self.plaintext + ) # AES-CBC returns a zero-padded plaintext else: assert decrypt_result.plaintext == self.plaintext @@ -436,7 +442,7 @@ def test_symmetric_wrap_and_unwrap(self, key_client, **kwargs): result = 
crypto_client.unwrap_key(result.algorithm, result.encrypted_key) assert result.key == self.plaintext - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_encrypt_local(self, key_client, is_hsm, **kwargs): @@ -453,7 +459,7 @@ def test_encrypt_local(self, key_client, is_hsm, **kwargs): result = crypto_client.decrypt(result.algorithm, result.ciphertext) assert result.plaintext == self.plaintext - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_encrypt_local_from_jwk(self, key_client, is_hsm, **kwargs): @@ -470,8 +476,8 @@ def test_encrypt_local_from_jwk(self, key_client, is_hsm, **kwargs): result = crypto_client.decrypt(result.algorithm, result.ciphertext) assert result.plaintext == self.plaintext - - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @KeysClientPreparer() @recorded_by_proxy def test_symmetric_encrypt_local(self, key_client, **kwargs): @@ -499,7 +505,7 @@ def test_symmetric_encrypt_local(self, key_client, **kwargs): assert decrypt_result.key_id == imported_key.id assert decrypt_result.algorithm == algorithm assert decrypt_result.plaintext == self.plaintext - + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @KeysClientPreparer() @recorded_by_proxy @@ -524,14 +530,14 @@ def test_symmetric_decrypt_local(self, key_client, **kwargs): encrypt_result.algorithm, encrypt_result.ciphertext, iv=encrypt_result.iv, - additional_authenticated_data=self.aad + additional_authenticated_data=self.aad, ) assert decrypt_result.key_id == imported_key.id assert decrypt_result.algorithm == algorithm assert decrypt_result.plaintext == self.plaintext - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + 
@pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_wrap_local(self, key_client, is_hsm, **kwargs): @@ -547,7 +553,7 @@ def test_wrap_local(self, key_client, is_hsm, **kwargs): result = crypto_client.unwrap_key(result.algorithm, result.encrypted_key) assert result.key == self.plaintext - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_wrap_local_from_jwk(self, key_client, is_hsm, **kwargs): @@ -564,7 +570,7 @@ def test_wrap_local_from_jwk(self, key_client, is_hsm, **kwargs): result = crypto_client.unwrap_key(result.algorithm, result.encrypted_key) assert result.key == self.plaintext - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_rsa_verify_local(self, key_client, is_hsm, **kwargs): @@ -589,7 +595,7 @@ def test_rsa_verify_local(self, key_client, is_hsm, **kwargs): result = crypto_client.verify(result.algorithm, digest, result.signature) assert result.is_valid - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_rsa_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): @@ -600,12 +606,12 @@ def test_rsa_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): crypto_client = self.create_crypto_client(key, api_version=key_client.api_version) local_client = CryptographyClient.from_jwk(key.key) for signature_algorithm, hash_function in ( - (SignatureAlgorithm.ps256, hashlib.sha256), - (SignatureAlgorithm.ps384, hashlib.sha384), - (SignatureAlgorithm.ps512, hashlib.sha512), - (SignatureAlgorithm.rs256, hashlib.sha256), - (SignatureAlgorithm.rs384, hashlib.sha384), - (SignatureAlgorithm.rs512, hashlib.sha512), + 
(SignatureAlgorithm.ps256, hashlib.sha256), + (SignatureAlgorithm.ps384, hashlib.sha384), + (SignatureAlgorithm.ps512, hashlib.sha512), + (SignatureAlgorithm.rs256, hashlib.sha256), + (SignatureAlgorithm.rs384, hashlib.sha384), + (SignatureAlgorithm.rs512, hashlib.sha512), ): digest = hash_function(self.plaintext).digest() @@ -615,7 +621,7 @@ def test_rsa_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): result = local_client.verify(result.algorithm, digest, result.signature) assert result.is_valid - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_ec_verify_local(self, key_client, is_hsm, **kwargs): @@ -640,7 +646,7 @@ def test_ec_verify_local(self, key_client, is_hsm, **kwargs): result = crypto_client.verify(result.algorithm, digest, result.signature) assert result.is_valid - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_ec_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): @@ -666,11 +672,12 @@ def test_ec_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): result = local_client.verify(result.algorithm, digest, result.signature) assert result.is_valid - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_local_validity_period_enforcement(self, key_client, is_hsm, **kwargs): """Local crypto operations should respect a key's nbf and exp properties""" + def test_operations(key, expected_error_substrings, encrypt_algorithms, wrap_algorithms): crypto_client = self.create_crypto_client(key, api_version=key_client.api_version) for algorithm in encrypt_algorithms: @@ -713,7 +720,7 @@ def test_operations(key, expected_error_substrings, encrypt_algorithms, 
wrap_alg valid_key, (str(the_year_3000), str(the_year_3001)), rsa_encryption_algorithms, rsa_wrap_algorithms ) - @pytest.mark.parametrize("api_version,is_hsm",only_vault_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_vault_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_send_request(self, key_client, is_hsm, **kwargs): @@ -734,7 +741,7 @@ def test_send_request(self, key_client, is_hsm, **kwargs): method="POST", url=f"keys/{key_name}/{imported_key.properties.version}/sign", headers={"Accept": "application/json"}, - json=json + json=json, ) response = crypto_client.send_request(request) response.raise_for_status() @@ -1081,7 +1088,7 @@ def test_rsa_public_key_public_bytes(): public_numbers = public_key.public_numbers() crypto_public_numbers = RSAPublicNumbers(e=public_numbers.e, n=public_numbers.n) crypto_public_bytes = crypto_public_numbers.public_key().public_bytes(Encoding.PEM, PublicFormat.PKCS1) - assert public_bytes == crypto_public_bytes + assert public_bytes == crypto_public_bytes def test_rsa_public_key_private_key_size(): diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client_async.py b/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client_async.py index 6b043c430710..d101f0507fe1 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client_async.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -70,8 +71,9 @@ def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops assert key.kty == kty, f"kty should by '{key}', but is '{key.kty}'" assert key.n and key.e, "Bad RSA public material." 
assert sorted(key_ops) == sorted(key.key_ops), f"keyOps should be '{key_ops}', but is '{key.key_ops}'" - assert key_attributes.properties.created_on and key_attributes.properties.updated_on,"Missing required date attributes." - + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kty): prefix = "/".join(s.strip("/") for s in [vault, "keys", key_name]) @@ -80,7 +82,9 @@ def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kt assert key_curve == key.crv assert kid.index(prefix) == 0, f"Key Id should start with '{prefix}', but value is '{kid}'" assert key.kty == kty, f"kty should by '{key}', but is '{key.kty}'" - assert key_attributes.properties.created_on and key_attributes.properties.updated_on,"Missing required date attributes." + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." 
async def _import_test_key(self, client, name, hardware_protected=False): def _to_bytes(hex): @@ -133,7 +137,7 @@ async def _import_symmetric_test_key(self, client, name): return key_vault_key @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_ec_key_id(self, key_client, is_hsm, **kwargs): @@ -150,7 +154,7 @@ async def test_ec_key_id(self, key_client, is_hsm, **kwargs): await crypto_client.verify(SignatureAlgorithm.es256, hashlib.sha256(self.plaintext).digest(), self.plaintext) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_rsa_key_id(self, key_client, is_hsm, **kwargs): @@ -169,7 +173,7 @@ async def test_rsa_key_id(self, key_client, is_hsm, **kwargs): await crypto_client.wrap_key(KeyWrapAlgorithm.rsa_oaep, self.plaintext) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_encrypt_and_decrypt(self, key_client, is_hsm, **kwargs): @@ -188,7 +192,7 @@ async def test_encrypt_and_decrypt(self, key_client, is_hsm, **kwargs): assert self.plaintext == result.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_sign_and_verify(self, key_client, is_hsm, **kwargs): @@ -211,7 +215,7 @@ async def test_sign_and_verify(self, key_client, is_hsm, **kwargs): assert verified.is_valid @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + 
@pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_wrap_and_unwrap(self, key_client, is_hsm, **kwargs): @@ -231,7 +235,7 @@ async def test_wrap_and_unwrap(self, key_client, is_hsm, **kwargs): assert key_bytes == result.key @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_symmetric_encrypt_and_decrypt(self, key_client, **kwargs): @@ -257,7 +261,7 @@ async def test_symmetric_encrypt_and_decrypt(self, key_client, **kwargs): result.ciphertext, iv=result.iv, authentication_tag=result.tag, - additional_authenticated_data=self.aad + additional_authenticated_data=self.aad, ) else: result = await crypto_client.encrypt( @@ -268,7 +272,7 @@ async def test_symmetric_encrypt_and_decrypt(self, key_client, **kwargs): result.algorithm, result.ciphertext, iv=self.iv, - additional_authenticated_data=None if "CBC" in algorithm else self.aad + additional_authenticated_data=None if "CBC" in algorithm else self.aad, ) assert result.key_id == imported_key.id @@ -279,7 +283,7 @@ async def test_symmetric_encrypt_and_decrypt(self, key_client, **kwargs): assert result.plaintext == self.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_symmetric_wrap_and_unwrap(self, key_client, **kwargs): @@ -296,7 +300,7 @@ async def test_symmetric_wrap_and_unwrap(self, key_client, **kwargs): assert result.key == self.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_encrypt_local(self, key_client, is_hsm, **kwargs): @@ -333,7 +337,7 
@@ async def test_encrypt_local_from_jwk(self, key_client, is_hsm, **kwargs): assert result.plaintext, self.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_symmetric_encrypt_local(self, key_client, **kwargs): @@ -363,7 +367,7 @@ async def test_symmetric_encrypt_local(self, key_client, **kwargs): assert decrypt_result.plaintext == self.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_symmetric_decrypt_local(self, key_client, **kwargs): @@ -387,7 +391,7 @@ async def test_symmetric_decrypt_local(self, key_client, **kwargs): encrypt_result.algorithm, encrypt_result.ciphertext, iv=encrypt_result.iv, - additional_authenticated_data=self.aad + additional_authenticated_data=self.aad, ) assert decrypt_result.key_id == imported_key.id @@ -395,7 +399,7 @@ async def test_symmetric_decrypt_local(self, key_client, **kwargs): assert decrypt_result.plaintext == self.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_wrap_local(self, key_client, is_hsm, **kwargs): @@ -412,7 +416,7 @@ async def test_wrap_local(self, key_client, is_hsm, **kwargs): assert result.key, self.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_wrap_local_from_jwk(self, key_client, is_hsm, **kwargs): @@ -430,7 +434,7 @@ async def test_wrap_local_from_jwk(self, key_client, is_hsm, **kwargs): assert result.key, self.plaintext 
@pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_rsa_verify_local(self, key_client, is_hsm, **kwargs): @@ -456,7 +460,7 @@ async def test_rsa_verify_local(self, key_client, is_hsm, **kwargs): assert result.is_valid @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_rsa_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): @@ -467,12 +471,12 @@ async def test_rsa_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): crypto_client = self.create_crypto_client(key, is_async=True, api_version=key_client.api_version) local_client = CryptographyClient.from_jwk(key.key) for signature_algorithm, hash_function in ( - (SignatureAlgorithm.ps256, hashlib.sha256), - (SignatureAlgorithm.ps384, hashlib.sha384), - (SignatureAlgorithm.ps512, hashlib.sha512), - (SignatureAlgorithm.rs256, hashlib.sha256), - (SignatureAlgorithm.rs384, hashlib.sha384), - (SignatureAlgorithm.rs512, hashlib.sha512), + (SignatureAlgorithm.ps256, hashlib.sha256), + (SignatureAlgorithm.ps384, hashlib.sha384), + (SignatureAlgorithm.ps512, hashlib.sha512), + (SignatureAlgorithm.rs256, hashlib.sha256), + (SignatureAlgorithm.rs384, hashlib.sha384), + (SignatureAlgorithm.rs512, hashlib.sha512), ): digest = hash_function(self.plaintext).digest() @@ -483,7 +487,7 @@ async def test_rsa_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): assert result.is_valid @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_ec_verify_local(self, key_client, is_hsm, **kwargs): @@ -509,7 +513,7 @@ async def 
test_ec_verify_local(self, key_client, is_hsm, **kwargs): assert result.is_valid @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_ec_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): @@ -536,11 +540,12 @@ async def test_ec_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): assert result.is_valid @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_local_validity_period_enforcement(self, key_client, is_hsm, **kwargs): """Local crypto operations should respect a key's nbf and exp properties""" + async def test_operations(key, expected_error_substrings, encrypt_algorithms, wrap_algorithms): crypto_client = self.create_crypto_client(key, is_async=True, api_version=key_client.api_version) crypto_client._keys_get_forbidden = True # Prevent caching key material locally, to force remote ops @@ -587,7 +592,7 @@ async def test_operations(key, expected_error_substrings, encrypt_algorithms, wr ) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_vault_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_vault_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_send_request(self, key_client, is_hsm, **kwargs): @@ -608,7 +613,7 @@ async def test_send_request(self, key_client, is_hsm, **kwargs): method="POST", url=f"keys/{key_name}/{imported_key.properties.version}/sign", headers={"Accept": "application/json"}, - json=json + json=json, ) response = await crypto_client.send_request(request) response.raise_for_status() @@ -632,7 +637,10 @@ class CustomHookPolicy(SansIOHTTPPolicy): @pytest.mark.asyncio async def test_symmetric_wrap_and_unwrap_local(): key = KeyVaultKey( - 
key_id="http://localhost/keys/key/version", k=os.urandom(32), kty="oct", key_ops=["unwrapKey", "wrapKey"], + key_id="http://localhost/keys/key/version", + k=os.urandom(32), + kty="oct", + key_ops=["unwrapKey", "wrapKey"], ) crypto_client = CryptographyClient(key, credential=lambda *_: None) @@ -795,7 +803,7 @@ async def test_local_only_mode_no_service_calls(): async def test_local_only_mode_raise(): """A local-only CryptographyClient should raise an exception if an operation can't be performed locally""" - jwk = {"kty":"RSA", "key_ops":["decrypt", "verify", "unwrapKey"], "n":b"10011", "e":b"10001"} + jwk = {"kty": "RSA", "key_ops": ["decrypt", "verify", "unwrapKey"], "n": b"10011", "e": b"10001"} client = CryptographyClient.from_jwk(jwk=jwk) # Algorithm not supported locally @@ -908,7 +916,7 @@ async def test_aes_cbc_iv_validation(): @pytest.mark.asyncio async def test_encrypt_argument_validation(): """The client should raise an error when arguments don't work with the specified algorithm""" - + mock_client = mock.Mock() key = mock.Mock( spec=KeyVaultKey, diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto.py b/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto.py index a1cdd20eeb4d..82f3aef073b2 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto.py @@ -12,6 +12,7 @@ all_api_versions = get_decorator(only_vault=True) + class TestCryptoExamples(KeyVaultTestCase, KeysTestCase): @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @@ -59,7 +60,9 @@ def test_wrap_unwrap(self, key_client, **kwargs): key = key_client.create_rsa_key(key_name) client = CryptographyClient(key, credential, api_version=key_client.api_version) - key_bytes = b'\xc5\xb0\xfc\xf1C\x8a\x88pj\x11\x8d\xe5\x94\xe8\xff\x04\x0eY\xfeu\x8a\xe9<\x06(\xdb\x7f\xa9~\x85\x02\x04' + key_bytes = ( + 
b"\xc5\xb0\xfc\xf1C\x8a\x88pj\x11\x8d\xe5\x94\xe8\xff\x04\x0eY\xfeu\x8a\xe9<\x06(\xdb\x7f\xa9~\x85\x02\x04" + ) # [START wrap_key] from azure.keyvault.keys.crypto import KeyWrapAlgorithm diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto_async.py b/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto_async.py index cd5d9716d87c..a25ad3d18181 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto_async.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto_async.py @@ -16,7 +16,7 @@ class TestCryptoExamples(KeyVaultTestCase): @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_encrypt_decrypt_async(self, key_client, **kwargs): @@ -59,7 +59,7 @@ async def test_encrypt_decrypt_async(self, key_client, **kwargs): # [END decrypt] @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_wrap_unwrap_async(self, key_client, **kwargs): @@ -68,7 +68,9 @@ async def test_wrap_unwrap_async(self, key_client, **kwargs): key = await key_client.create_rsa_key(key_name) client = CryptographyClient(key, credential, api_version=key_client.api_version) - key_bytes = b'\xc5\xb0\xfc\xf1C\x8a\x88pj\x11\x8d\xe5\x94\xe8\xff\x04\x0eY\xfeu\x8a\xe9<\x06(\xdb\x7f\xa9~\x85\x02\x04' + key_bytes = ( + b"\xc5\xb0\xfc\xf1C\x8a\x88pj\x11\x8d\xe5\x94\xe8\xff\x04\x0eY\xfeu\x8a\xe9<\x06(\xdb\x7f\xa9~\x85\x02\x04" + ) # [START wrap_key] from azure.keyvault.keys.crypto import KeyWrapAlgorithm @@ -87,7 +89,7 @@ async def test_wrap_unwrap_async(self, key_client, **kwargs): # [END unwrap_key] @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + 
@pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_sign_verify_async(self, key_client, **kwargs): diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_key_client.py b/sdk/keyvault/azure-keyvault-keys/tests/test_key_client.py index 9fe97370442c..18d2a9d8c8f2 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_key_client.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_key_client.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -23,7 +24,7 @@ KeyRotationLifetimeAction, KeyRotationPolicy, KeyRotationPolicyAction, - KeyType + KeyType, ) from azure.keyvault.keys._generated.models import KeyRotationPolicy as _KeyRotationPolicy from azure.keyvault.keys._shared.client_base import DEFAULT_VERSION @@ -54,6 +55,7 @@ def _assert_rotation_policies_equal(p1, p2): assert p1.updated_on == p2.updated_on assert len(p1.lifetime_actions) == len(p2.lifetime_actions) + def _assert_lifetime_actions_equal(a1, a2): assert a1.action == a2.action assert a1.time_after_create == a2.time_after_create @@ -115,8 +117,9 @@ def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kt assert key_curve == key.crv assert kid.index(prefix) == 0, f"Key Id should start with '{prefix}', but value is '{kid}'" assert key.kty == kty, f"kty should be '{kty}', but is '{key.kty}'" - assert key_attributes.properties.created_on and key_attributes.properties.updated_on,"Missing required date attributes." - + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." 
def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops): prefix = "/".join(s.strip("/") for s in [vault, "keys", key_name]) @@ -126,7 +129,9 @@ def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops assert key.kty == kty, f"kty should be '{kty}', but is '{key.kty}'" assert key.n and key.e, "Bad RSA public material." assert sorted(key_ops) == sorted(key.key_ops), f"keyOps should be '{key_ops}', but is '{key.key_ops}'" - assert key_attributes.properties.created_on and key_attributes.properties.updated_on, "Missing required date attributes." + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." def _update_key_properties(self, client, key, release_policy=None): expires = date_parse.parse("2050-01-02T08:00:00.000Z") @@ -184,7 +189,7 @@ def _to_bytes(hex): self._validate_rsa_key_bundle(imported_key, client.vault_url, name, key.kty, key.key_ops) return imported_key - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_key_crud_operations(self, client, is_hsm, **kwargs): @@ -210,7 +215,7 @@ def test_key_crud_operations(self, client, is_hsm, **kwargs): # create rsa key rsa_key_name = self.get_resource_name("crud-rsa-key") tags = {"purpose": "unit test", "test name ": "CreateRSAKeyTest"} - key_ops = ["encrypt","decrypt","sign","verify","wrapKey","unwrapKey"] + key_ops = ["encrypt", "decrypt", "sign", "verify", "wrapKey", "unwrapKey"] rsa_key = self._create_rsa_key( client, key_name=rsa_key_name, key_operations=key_ops, size=2048, tags=tags, hardware_protected=is_hsm ) @@ -239,8 +244,10 @@ def test_key_crud_operations(self, client, is_hsm, **kwargs): # aside from key_ops, the original updated keys should have the same JWKs self._assert_jwks_equal(rsa_key.key, deleted_key.key) assert deleted_key.id == rsa_key.id - assert 
deleted_key.recovery_id and deleted_key.deleted_date and deleted_key.scheduled_purge_date, "Missing required deleted key attributes." - + assert ( + deleted_key.recovery_id and deleted_key.deleted_date and deleted_key.scheduled_purge_date + ), "Missing required deleted key attributes." + deleted_key_poller.wait() # get the deleted key when soft deleted enabled @@ -248,7 +255,7 @@ def test_key_crud_operations(self, client, is_hsm, **kwargs): assert deleted_key is not None assert rsa_key.id == deleted_key.id - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @KeysClientPreparer() @recorded_by_proxy def test_rsa_public_exponent(self, client, **kwargs): @@ -260,7 +267,7 @@ def test_rsa_public_exponent(self, client, **kwargs): public_exponent = key.key.e[0] assert public_exponent == 17 - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_backup_restore(self, client, is_hsm, **kwargs): @@ -286,7 +293,7 @@ def test_backup_restore(self, client, is_hsm, **kwargs): restored_key = self._poll_until_no_exception(restore_function, ResourceExistsError) self._assert_key_attributes_equal(created_bundle.properties, restored_key.properties) - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_key_list(self, client, is_hsm, **kwargs): @@ -309,7 +316,7 @@ def test_key_list(self, client, is_hsm, **kwargs): del expected[key.name] assert len(expected) == 0 - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_list_versions(self, client, is_hsm, **kwargs): @@ -336,7 +343,7 @@ def test_list_versions(self, client, is_hsm, **kwargs): assert 0 == 
len(expected) @pytest.mark.skip("Temporarily disabled due to service issue") - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_list_deleted_keys(self, client, is_hsm, **kwargs): @@ -367,7 +374,7 @@ def test_list_deleted_keys(self, client, is_hsm, **kwargs): del expected[key.name] @pytest.mark.skip("Temporarily disabled due to service issue") - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_recover(self, client, is_hsm, **kwargs): @@ -393,7 +400,7 @@ def test_recover(self, client, is_hsm, **kwargs): expected_key = keys[key_name] self._assert_key_attributes_equal(expected_key.properties, recovered_key.properties) - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_purge(self, client, is_hsm, **kwargs): @@ -424,8 +431,8 @@ def test_purge(self, client, is_hsm, **kwargs): deleted = [s.name for s in client.list_deleted_keys()] assert not any(s in deleted for s in key_names) - @pytest.mark.parametrize("api_version,is_hsm",logging_enabled) - @KeysClientPreparer(logging_enable = True) + @pytest.mark.parametrize("api_version,is_hsm", logging_enabled) + @KeysClientPreparer(logging_enable=True) @recorded_by_proxy def test_logging_enabled(self, client, is_hsm, **kwargs): mock_handler = MockHandler() @@ -459,8 +466,8 @@ def test_logging_enabled(self, client, is_hsm, **kwargs): mock_handler.close() assert False, "Expected request body wasn't logged" - @pytest.mark.parametrize("api_version,is_hsm",logging_enabled) - @KeysClientPreparer(logging_enable = False) + @pytest.mark.parametrize("api_version,is_hsm", logging_enabled) + @KeysClientPreparer(logging_enable=False) @recorded_by_proxy def 
test_logging_disabled(self, client, is_hsm, **kwargs): mock_handler = MockHandler() @@ -493,7 +500,7 @@ def test_logging_disabled(self, client, is_hsm, **kwargs): mock_handler.close() - @pytest.mark.parametrize("api_version,is_hsm",only_hsm_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_get_random_bytes(self, client, **kwargs): @@ -509,11 +516,11 @@ def test_get_random_bytes(self, client, **kwargs): assert all(random_bytes != rb for rb in generated_random_bytes) generated_random_bytes.append(random_bytes) - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_key_release(self, client, is_hsm, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. Follow up with service team") if is_hsm and client.api_version == ApiVersion.V7_5: pytest.skip("Currently failing on 7.5-preview.1; skipping for now") @@ -538,7 +545,7 @@ def test_key_release(self, client, is_hsm, **kwargs): if self.is_live and "Target environment attestation statement cannot be verified" in ex.message: pytest.skip("Target environment attestation statement cannot be verified. 
Likely transient failure.") - @pytest.mark.parametrize("api_version,is_hsm",only_hsm_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_imported_key_release(self, client, **kwargs): @@ -560,11 +567,11 @@ def test_imported_key_release(self, client, **kwargs): release_result = client.release_key(imported_key_name, attestation) assert release_result.value - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_update_release_policy(self, client, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. Follow up with service team") if client.api_version == ApiVersion.V7_5: pytest.skip("Currently failing on 7.5-preview.1; skipping for now") @@ -584,17 +591,9 @@ def test_update_release_policy(self, client, **kwargs): new_release_policy_json = { "anyOf": [ - { - "anyOf": [ - { - "claim": "sdk-test", - "equals": False - } - ], - "authority": attestation_uri.rstrip("/") + "/" - } + {"anyOf": [{"claim": "sdk-test", "equals": False}], "authority": attestation_uri.rstrip("/") + "/"} ], - "version": "1.0.0" + "version": "1.0.0", } policy_string = json.dumps(new_release_policy_json).encode() new_release_policy = KeyReleasePolicy(policy_string) @@ -605,12 +604,12 @@ def test_update_release_policy(self, client, **kwargs): claim_condition = claim_condition if isinstance(claim_condition, bool) else json.loads(claim_condition) assert claim_condition is False - #Immutable policies aren't currently supported on Managed HSM - @pytest.mark.parametrize("api_version,is_hsm",only_vault_7_4_plus) + # Immutable policies aren't currently supported on Managed HSM + @pytest.mark.parametrize("api_version,is_hsm", only_vault_7_4_plus) @KeysClientPreparer() 
@recorded_by_proxy def test_immutable_release_policy(self, client, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. Follow up with service team") attestation_uri = self._get_attestation_uri() @@ -624,17 +623,9 @@ def test_immutable_release_policy(self, client, **kwargs): new_release_policy_json = { "anyOf": [ - { - "anyOf": [ - { - "claim": "sdk-test", - "equals": False - } - ], - "authority": attestation_uri.rstrip("/") + "/" - } + {"anyOf": [{"claim": "sdk-test", "equals": False}], "authority": attestation_uri.rstrip("/") + "/"} ], - "version": "1.0.0" + "version": "1.0.0", } policy_string = json.dumps(new_release_policy_json).encode() new_release_policy = KeyReleasePolicy(policy_string, immutable=True) @@ -642,11 +633,11 @@ def test_immutable_release_policy(self, client, **kwargs): with pytest.raises(HttpResponseError): self._update_key_properties(client, key, new_release_policy) - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_key_rotation(self, client, is_hsm, **kwargs): - if (not is_public_cloud() and self.is_live): + if not is_public_cloud() and self.is_live: pytest.skip("This test is not supported in usgov/china region. 
Follow up with service team.") key_name = self.get_resource_name("rotation-key") @@ -663,11 +654,11 @@ def test_key_rotation(self, client, is_hsm, **kwargs): assert key.properties.version != rotated_key.properties.version assert key.key.n != rotated_key.key.n - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_key_rotation_policy(self, client, is_hsm, **kwargs): - if (not is_public_cloud() and self.is_live): + if not is_public_cloud() and self.is_live: pytest.skip("This test is not supported in usgov/china region. Follow up with service team.") key_name = self.get_resource_name("rotation-key") @@ -720,7 +711,9 @@ def test_key_rotation_policy(self, client, is_hsm, **kwargs): if not is_hsm: # updating with a round-tripped policy and overriding lifetime_actions newest_actions = [KeyRotationLifetimeAction(KeyRotationPolicyAction.notify, time_before_expiry="P60D")] - newest_policy = client.update_key_rotation_policy(key_name, policy=new_policy, lifetime_actions=newest_actions) + newest_policy = client.update_key_rotation_policy( + key_name, policy=new_policy, lifetime_actions=newest_actions + ) newest_fetched_policy = client.get_key_rotation_policy(key_name) assert newest_policy.expires_in == "P90D" _assert_rotation_policies_equal(newest_policy, newest_fetched_policy) @@ -738,7 +731,7 @@ def test_key_rotation_policy(self, client, is_hsm, **kwargs): newest_fetched_policy_actions = newest_fetched_policy.lifetime_actions[i] _assert_lifetime_actions_equal(newest_policy_actions, newest_fetched_policy_actions) - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_get_cryptography_client(self, client, is_hsm, **kwargs): @@ -774,7 +767,7 @@ def test_get_cryptography_client(self, client, is_hsm, **kwargs): assert "RSA-OAEP" == 
result.algorithm assert plaintext == result.plaintext - @pytest.mark.parametrize("api_version,is_hsm",only_vault_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_vault_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_send_request(self, client, is_hsm, **kwargs): @@ -790,7 +783,7 @@ def test_send_request(self, client, is_hsm, **kwargs): response = client.send_request(request) assert response.json()["key"]["kid"] == key.id - @pytest.mark.parametrize("api_version,is_hsm",only_hsm_default) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm_default) @KeysClientPreparer() @recorded_by_proxy def test_get_key_attestation(self, client, **kwargs): @@ -834,6 +827,7 @@ def test_40x_handling(self, client, **kwargs): # Test that 409 is raised correctly (`create_key` shouldn't actually trigger this, but for raising behavior) def run(*_, **__): return Mock(http_response=Mock(status_code=409)) + with patch.object(client._client._client._pipeline, "run", run): with pytest.raises(ResourceExistsError): client.create_key("...", "RSA") diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_keys_async.py b/sdk/keyvault/azure-keyvault-keys/tests/test_keys_async.py index c1ec2e7a2e43..fe21f207819b 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_keys_async.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_keys_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
@@ -38,12 +39,8 @@ all_api_versions = get_decorator(is_async=True) only_hsm = get_decorator(only_hsm=True, is_async=True) only_hsm_default = get_decorator(only_hsm=True, is_async=True, api_versions=[DEFAULT_VERSION]) -only_hsm_7_4_plus = get_decorator( - only_hsm=True, is_async=True, api_versions=[ApiVersion.V7_4, ApiVersion.V7_5] -) -only_vault_7_4_plus = get_decorator( - only_vault=True, is_async=True, api_versions=[ApiVersion.V7_4, ApiVersion.V7_5] -) +only_hsm_7_4_plus = get_decorator(only_hsm=True, is_async=True, api_versions=[ApiVersion.V7_4, ApiVersion.V7_5]) +only_vault_7_4_plus = get_decorator(only_vault=True, is_async=True, api_versions=[ApiVersion.V7_4, ApiVersion.V7_5]) only_7_4_plus = get_decorator(is_async=True, api_versions=[ApiVersion.V7_4, ApiVersion.V7_5]) logging_enabled = get_decorator(is_async=True, logging_enable=True) logging_disabled = get_decorator(is_async=True, logging_enable=False) @@ -69,15 +66,15 @@ def _assert_jwks_equal(self, jwk1, jwk2): assert getattr(jwk1, field) == getattr(jwk2, field) def _assert_key_attributes_equal(self, k1: KeyProperties, k2: KeyProperties) -> None: - assert k1.name== k2.name - assert k1.vault_url== k2.vault_url - assert k1.enabled== k2.enabled - assert k1.not_before== k2.not_before - assert k1.expires_on== k2.expires_on - assert k1.created_on== k2.created_on - assert k1.updated_on== k2.updated_on - assert k1.tags== k2.tags - assert k1.recovery_level== k2.recovery_level + assert k1.name == k2.name + assert k1.vault_url == k2.vault_url + assert k1.enabled == k2.enabled + assert k1.not_before == k2.not_before + assert k1.expires_on == k2.expires_on + assert k1.created_on == k2.created_on + assert k1.updated_on == k2.updated_on + assert k1.tags == k2.tags + assert k1.recovery_level == k2.recovery_level assert k1.hsm_platform == k2.hsm_platform async def _create_rsa_key(self, client, key_name, **kwargs): @@ -107,7 +104,9 @@ def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kt assert 
key_curve == key.crv assert kid.index(prefix) == 0, f"Key Id should start with '{prefix}', but value is '{kid}'" assert key.kty == kty, f"kty should be '{kty}', but is '{key.kty}'" - assert key_attributes.properties.created_on and key_attributes.properties.updated_on,"Missing required date attributes." + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops): prefix = "/".join(s.strip("/") for s in [vault, "keys", key_name]) @@ -117,7 +116,9 @@ def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops assert key.kty == kty, f"kty should be '{kty}', but is '{key.kty}'" assert key.n and key.e, "Bad RSA public material." assert sorted(key_ops) == sorted(key.key_ops), f"keyOps should be '{key_ops}', but is '{key.key_ops}'" - assert key_attributes.properties.created_on and key_attributes.properties.updated_on,"Missing required date attributes." + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." 
async def _update_key_properties(self, client, key, release_policy=None): expires = date_parse.parse("2050-01-02T08:00:00.000Z") @@ -183,7 +184,7 @@ def _to_bytes(hex): return imported_key @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_key_crud_operations(self, client, is_hsm, **kwargs): @@ -224,9 +225,7 @@ async def test_key_crud_operations(self, client, is_hsm, **kwargs): self._assert_key_attributes_equal(rsa_key.properties, key.properties) # get key without version - self._assert_key_attributes_equal( - rsa_key.properties, (await client.get_key(rsa_key.name)).properties - ) + self._assert_key_attributes_equal(rsa_key.properties, (await client.get_key(rsa_key.name)).properties) # update key with version if self.is_live: @@ -242,7 +241,9 @@ async def test_key_crud_operations(self, client, is_hsm, **kwargs): # aside from key_ops, the original updated keys should have the same JWKs self._assert_jwks_equal(rsa_key.key, deleted_key.key) assert deleted_key.id == rsa_key.id - assert deleted_key.recovery_id and deleted_key.deleted_date and deleted_key.scheduled_purge_date,"Missing required deleted key attributes." + assert ( + deleted_key.recovery_id and deleted_key.deleted_date and deleted_key.scheduled_purge_date + ), "Missing required deleted key attributes." 
# get the deleted key when soft deleted enabled deleted_key = await client.get_deleted_key(rsa_key.name) @@ -250,7 +251,7 @@ async def test_key_crud_operations(self, client, is_hsm, **kwargs): assert rsa_key.id == deleted_key.id @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_rsa_public_exponent(self, client, **kwargs): @@ -263,7 +264,7 @@ async def test_rsa_public_exponent(self, client, **kwargs): assert public_exponent == 17 @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_backup_restore(self, client, is_hsm, **kwargs): @@ -276,7 +277,7 @@ async def test_backup_restore(self, client, is_hsm, **kwargs): # backup key key_backup = await client.backup_key(created_bundle.name) - #self.assertIsNotNone(key_backup, "key_backup") + # self.assertIsNotNone(key_backup, "key_backup") assert key_backup is not None # delete key @@ -291,7 +292,7 @@ async def test_backup_restore(self, client, is_hsm, **kwargs): self._assert_key_attributes_equal(created_bundle.properties, restored_key.properties) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_key_list(self, client, is_hsm, **kwargs): @@ -315,7 +316,7 @@ async def test_key_list(self, client, is_hsm, **kwargs): assert len(expected) == 0 @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_list_versions(self, client, is_hsm, **kwargs): @@ -343,7 +344,7 @@ async def 
test_list_versions(self, client, is_hsm, **kwargs): @pytest.mark.asyncio @pytest.mark.skip("Temporarily disabled due to service issue") - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_list_deleted_keys(self, client, is_hsm, **kwargs): @@ -376,7 +377,7 @@ async def test_list_deleted_keys(self, client, is_hsm, **kwargs): @pytest.mark.asyncio @pytest.mark.skip("Temporarily disabled due to service issue") - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_recover(self, client, is_hsm, **kwargs): @@ -407,7 +408,7 @@ async def test_recover(self, client, is_hsm, **kwargs): assert len(set(expected.keys()) & set(actual.keys())) == len(expected) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_purge(self, client, is_hsm, **kwargs): @@ -435,8 +436,8 @@ async def test_purge(self, client, is_hsm, **kwargs): assert deleted_key.name not in key_names @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",logging_enabled) - @AsyncKeysClientPreparer(logging_enable = True) + @pytest.mark.parametrize("api_version,is_hsm", logging_enabled) + @AsyncKeysClientPreparer(logging_enable=True) @recorded_by_proxy_async async def test_logging_enabled(self, client, is_hsm, **kwargs): mock_handler = MockHandler() @@ -471,8 +472,8 @@ async def test_logging_enabled(self, client, is_hsm, **kwargs): assert False, "Expected request body wasn't logged" @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",logging_disabled) - @AsyncKeysClientPreparer(logging_enable = False) + 
@pytest.mark.parametrize("api_version,is_hsm", logging_disabled) + @AsyncKeysClientPreparer(logging_enable=False) @recorded_by_proxy_async async def test_logging_disabled(self, client, is_hsm, **kwargs): mock_handler = MockHandler() @@ -506,7 +507,7 @@ async def test_logging_disabled(self, client, is_hsm, **kwargs): mock_handler.close() @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_get_random_bytes(self, client, **kwargs): @@ -523,11 +524,11 @@ async def test_get_random_bytes(self, client, **kwargs): generated_random_bytes.append(random_bytes) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_key_release(self, client, is_hsm, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. Follow up with service team") if is_hsm and client.api_version == ApiVersion.V7_5: pytest.skip("Currently failing on 7.5-preview.1; skipping for now") @@ -553,7 +554,7 @@ async def test_key_release(self, client, is_hsm, **kwargs): pytest.skip("Target environment attestation statement cannot be verified. 
Likely transient failure.") @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_imported_key_release(self, client, **kwargs): @@ -576,11 +577,11 @@ async def test_imported_key_release(self, client, **kwargs): assert release_result.value @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_update_release_policy(self, client, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. Follow up with service team") if client.api_version == ApiVersion.V7_5: pytest.skip("Currently failing on 7.5-preview.1; skipping for now") @@ -600,17 +601,9 @@ async def test_update_release_policy(self, client, **kwargs): new_release_policy_json = { "anyOf": [ - { - "anyOf": [ - { - "claim": "sdk-test", - "equals": False - } - ], - "authority": attestation_uri.rstrip("/") + "/" - } + {"anyOf": [{"claim": "sdk-test", "equals": False}], "authority": attestation_uri.rstrip("/") + "/"} ], - "version": "1.0.0" + "version": "1.0.0", } policy_string = json.dumps(new_release_policy_json).encode() new_release_policy = KeyReleasePolicy(policy_string) @@ -623,11 +616,11 @@ async def test_update_release_policy(self, client, **kwargs): # Immutable policies aren't currently supported on Managed HSM @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_vault_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_vault_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_immutable_release_policy(self, client, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != 
"premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. Follow up with service team") attestation_uri = self._get_attestation_uri() @@ -641,17 +634,9 @@ async def test_immutable_release_policy(self, client, **kwargs): new_release_policy_json = { "anyOf": [ - { - "anyOf": [ - { - "claim": "sdk-test", - "equals": False - } - ], - "authority": attestation_uri.rstrip("/") + "/" - } + {"anyOf": [{"claim": "sdk-test", "equals": False}], "authority": attestation_uri.rstrip("/") + "/"} ], - "version": "1.0.0" + "version": "1.0.0", } policy_string = json.dumps(new_release_policy_json).encode() new_release_policy = KeyReleasePolicy(policy_string, immutable=True) @@ -660,11 +645,11 @@ async def test_immutable_release_policy(self, client, **kwargs): await self._update_key_properties(client, key, new_release_policy) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_key_rotation(self, client, is_hsm, **kwargs): - if (not is_public_cloud() and self.is_live): + if not is_public_cloud() and self.is_live: pytest.skip("This test is not supported in usgov/china region. Follow up with service team.") key_name = self.get_resource_name("rotation-key") @@ -684,11 +669,11 @@ async def test_key_rotation(self, client, is_hsm, **kwargs): assert key.key.n != rotated_key.key.n @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_key_rotation_policy(self, client, is_hsm, **kwargs): - if (not is_public_cloud() and self.is_live): + if not is_public_cloud() and self.is_live: pytest.skip("This test is not supported in usgov/china region. 
Follow up with service team.") key_name = self.get_resource_name("rotation-key") @@ -763,7 +748,7 @@ async def test_key_rotation_policy(self, client, is_hsm, **kwargs): _assert_lifetime_actions_equal(newest_policy_actions, newest_fetched_policy_actions) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_get_cryptography_client(self, client, is_hsm, **kwargs): @@ -800,7 +785,7 @@ async def test_get_cryptography_client(self, client, is_hsm, **kwargs): assert plaintext == result.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_vault_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_vault_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_send_request(self, client, is_hsm, **kwargs): @@ -817,7 +802,7 @@ async def test_send_request(self, client, is_hsm, **kwargs): assert response.json()["key"]["kid"] == key.id @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm_default) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm_default) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_get_key_attestation(self, client, **kwargs): @@ -864,6 +849,7 @@ async def test_40x_handling(self, client, **kwargs): # Test that 409 is raised correctly (`create_key` shouldn't actually trigger this, but for raising behavior) async def run(*_, **__): return Mock(http_response=Mock(status_code=409)) + with patch.object(client._client._client._pipeline, "run", run): with pytest.raises(ResourceExistsError): await client.create_key("...", "RSA") diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_local_crypto.py b/sdk/keyvault/azure-keyvault-keys/tests/test_local_crypto.py index 52ec32992543..e9b2867fc08d 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_local_crypto.py +++ 
b/sdk/keyvault/azure-keyvault-keys/tests/test_local_crypto.py @@ -7,10 +7,8 @@ import pytest from azure.keyvault.keys import KeyCurveName, KeyVaultKey -from azure.keyvault.keys.crypto import (EncryptionAlgorithm, KeyWrapAlgorithm, - SignatureAlgorithm) -from azure.keyvault.keys.crypto._providers import \ - get_local_cryptography_provider +from azure.keyvault.keys.crypto import EncryptionAlgorithm, KeyWrapAlgorithm, SignatureAlgorithm +from azure.keyvault.keys.crypto._providers import get_local_cryptography_provider from keys import EC_KEYS, RSA_KEYS @@ -48,14 +46,14 @@ def test_rsa_encrypt_decrypt(key, algorithm): (EncryptionAlgorithm.a256_cbcpad, 32), (EncryptionAlgorithm.a192_cbcpad, 24), (EncryptionAlgorithm.a128_cbcpad, 16), - ) + ), ) def test_symmetric_encrypt_decrypt(algorithm, key_size): jwk = { "k": os.urandom(key_size), - "kid":"http://localhost/keys/key/version", + "kid": "http://localhost/keys/key/version", "kty": "oct-HSM", - "key_ops": ("encrypt", "decrypt") + "key_ops": ("encrypt", "decrypt"), } key = KeyVaultKey(key_id="http://localhost/keys/key/version", jwk=jwk) provider = get_local_cryptography_provider(key.key) @@ -119,9 +117,9 @@ def test_rsa_wrap_unwrap(key, algorithm): def test_symmetric_wrap_unwrap(algorithm): jwk = { "k": os.urandom(32), - "kid":"http://localhost/keys/key/version", + "kid": "http://localhost/keys/key/version", "kty": "oct", - "key_ops": ("unwrapKey", "wrapKey") + "key_ops": ("unwrapKey", "wrapKey"), } key = KeyVaultKey(key_id="http://localhost/keys/key/version", jwk=jwk) provider = get_local_cryptography_provider(key.key) diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_parse_id.py b/sdk/keyvault/azure-keyvault-keys/tests/test_parse_id.py index 3f72b8cb9556..eb33db5f2860 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_parse_id.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_parse_id.py @@ -15,7 +15,7 @@ class TestParseId(KeyVaultTestCase, KeysTestCase): - 
@pytest.mark.parametrize("api_version,is_hsm",only_vault) + @pytest.mark.parametrize("api_version,is_hsm", only_vault) @KeysClientPreparer() @recorded_by_proxy def test_parse_key_id_with_version(self, client, **kwargs): diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys.py b/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys.py index 68e2b6496d64..be044fe64650 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys.py @@ -36,11 +36,11 @@ def test_create_key_client(): class TestExamplesKeyVault(KeyVaultTestCase, KeysTestCase): - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_example_key_crud_operations(self, key_client, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. 
Follow up with service team") key_name = self.get_resource_name("key-name") @@ -131,7 +131,7 @@ def test_example_key_crud_operations(self, key_client, **kwargs): deleted_key_poller.wait() # [END delete_key] - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @KeysClientPreparer() @recorded_by_proxy def test_example_create_oct_key(self, key_client, **kwargs): @@ -145,7 +145,7 @@ def test_example_create_oct_key(self, key_client, **kwargs): print(key.key_type) # [END create_oct_key] - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_example_key_list_operations(self, key_client, **kwargs): @@ -186,7 +186,7 @@ def test_example_key_list_operations(self, key_client, **kwargs): print(key.deleted_date) # [END list_deleted_keys] - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_example_keys_backup_restore(self, key_client, **kwargs): @@ -219,7 +219,7 @@ def test_example_keys_backup_restore(self, key_client, **kwargs): print(restored_key.properties.version) # [END restore_key_backup] - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_example_keys_recover(self, key_client, **kwargs): diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys_async.py b/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys_async.py index 1a3425ffc03e..f93cbe624798 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys_async.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys_async.py @@ -43,11 +43,11 @@ async def test_create_key_client(): class TestExamplesKeyVault(KeyVaultTestCase): 
@pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_example_key_crud_operations(self, key_client, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. Follow up with service team") key_name = self.get_resource_name("key-name") @@ -133,7 +133,7 @@ async def test_example_key_crud_operations(self, key_client, **kwargs): # [END delete_key] @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_example_create_oct_key(self, key_client, **kwargs): @@ -148,7 +148,7 @@ async def test_example_create_oct_key(self, key_client, **kwargs): # [END create_oct_key] @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_example_key_list_operations(self, key_client, **kwargs): @@ -195,7 +195,7 @@ async def test_example_key_list_operations(self, key_client, **kwargs): # [END list_deleted_keys] @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_example_keys_backup_restore(self, key_client, **kwargs): @@ -230,7 +230,7 @@ async def test_example_keys_backup_restore(self, key_client, **kwargs): # [END restore_key_backup] @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() 
@recorded_by_proxy_async async def test_example_keys_recover(self, key_client, **kwargs): From 02a0ba86731149466e302e78c163c27addf54f65 Mon Sep 17 00:00:00 2001 From: Nicola Camillucci Date: Thu, 16 Apr 2026 17:34:10 +0100 Subject: [PATCH 03/11] Regenerated Administration SDK --- .../_metadata.json | 6 + .../apiview-properties.json | 70 +++ .../azure/__init__.py | 4 - .../azure/keyvault/__init__.py | 4 - .../administration/_access_control_client.py | 38 +- .../keyvault/administration/_backup_client.py | 28 +- .../administration/_generated/_client.py | 16 +- .../_generated/_configuration.py | 10 +- .../_generated/_utils/model_base.py | 421 +++++++++++++----- .../_generated/_utils/serialization.py | 43 +- .../administration/_generated/_validation.py | 20 +- .../administration/_generated/aio/_client.py | 16 +- .../_generated/aio/_configuration.py | 10 +- .../_generated/aio/operations/__init__.py | 3 +- .../_generated/aio/operations/_operations.py | 187 +++++--- .../_generated/models/_enums.py | 4 +- .../_generated/models/_models.py | 28 +- .../_generated/operations/__init__.py | 3 +- .../_generated/operations/_operations.py | 225 +++++++--- .../_internal/async_challenge_auth_policy.py | 10 +- .../_internal/async_client_base.py | 8 +- .../administration/_internal/client_base.py | 2 +- .../_internal/http_challenge.py | 8 +- .../administration/_internal/polling.py | 2 +- .../azure/keyvault/administration/_models.py | 16 +- .../administration/_settings_client.py | 7 +- .../aio/_access_control_client.py | 38 +- .../administration/aio/_backup_client.py | 28 +- .../administration/aio/_settings_client.py | 7 +- .../samples/access_control_operations.py | 6 +- .../access_control_operations_async.py | 9 +- .../samples/backup_restore_operations.py | 2 +- .../backup_restore_operations_async.py | 5 +- .../samples/settings_operations.py | 3 +- .../samples/settings_operations_async.py | 4 +- .../tests/_async_test_case.py | 10 +- .../tests/_test_case.py | 8 +- .../tests/conftest.py | 17
+- .../perfstress_tests/get_role_definition.py | 8 +- .../tests/test_access_control.py | 14 +- .../tests/test_access_control_async.py | 19 +- .../tests/test_backup_client.py | 8 +- .../tests/test_backup_client_async.py | 15 +- .../tests/test_examples_administration.py | 5 +- .../test_examples_administration_async.py | 3 +- 45 files changed, 907 insertions(+), 491 deletions(-) create mode 100644 sdk/keyvault/azure-keyvault-administration/_metadata.json create mode 100644 sdk/keyvault/azure-keyvault-administration/apiview-properties.json diff --git a/sdk/keyvault/azure-keyvault-administration/_metadata.json b/sdk/keyvault/azure-keyvault-administration/_metadata.json new file mode 100644 index 000000000000..0a2924fbf51d --- /dev/null +++ b/sdk/keyvault/azure-keyvault-administration/_metadata.json @@ -0,0 +1,6 @@ +{ + "apiVersion": "2025-07-01", + "apiVersions": { + "KeyVault": "2025-07-01" + } +} \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-administration/apiview-properties.json b/sdk/keyvault/azure-keyvault-administration/apiview-properties.json new file mode 100644 index 000000000000..320d39e0f4fe --- /dev/null +++ b/sdk/keyvault/azure-keyvault-administration/apiview-properties.json @@ -0,0 +1,70 @@ +{ + "CrossLanguagePackageId": "KeyVault", + "CrossLanguageDefinitionId": { + "azure.keyvault.administration._generated.models.FullBackupOperation": "KeyVault.FullBackupOperation", + "azure.keyvault.administration._generated.models.FullBackupOperationError": "KeyVault.FullBackupOperation.error.anonymous", + "azure.keyvault.administration._generated.models.KeyVaultError": "KeyVaultError", + "azure.keyvault.administration._generated.models.Permission": "KeyVault.Permission", + "azure.keyvault.administration._generated.models.PreBackupOperationParameters": "KeyVault.PreBackupOperationParameters", + "azure.keyvault.administration._generated.models.PreRestoreOperationParameters": "KeyVault.PreRestoreOperationParameters", + 
"azure.keyvault.administration._generated.models.RestoreOperation": "KeyVault.RestoreOperation", + "azure.keyvault.administration._generated.models.RestoreOperationParameters": "KeyVault.RestoreOperationParameters", + "azure.keyvault.administration._generated.models.RoleAssignment": "KeyVault.RoleAssignment", + "azure.keyvault.administration._generated.models.RoleAssignmentCreateParameters": "KeyVault.RoleAssignmentCreateParameters", + "azure.keyvault.administration._generated.models.RoleAssignmentProperties": "KeyVault.RoleAssignmentProperties", + "azure.keyvault.administration._generated.models.RoleAssignmentPropertiesWithScope": "KeyVault.RoleAssignmentPropertiesWithScope", + "azure.keyvault.administration._generated.models.RoleDefinition": "KeyVault.RoleDefinition", + "azure.keyvault.administration._generated.models.RoleDefinitionCreateParameters": "KeyVault.RoleDefinitionCreateParameters", + "azure.keyvault.administration._generated.models.RoleDefinitionProperties": "KeyVault.RoleDefinitionProperties", + "azure.keyvault.administration._generated.models.SASTokenParameter": "KeyVault.SASTokenParameter", + "azure.keyvault.administration._generated.models.SelectiveKeyRestoreOperation": "KeyVault.SelectiveKeyRestoreOperation", + "azure.keyvault.administration._generated.models.SelectiveKeyRestoreOperationParameters": "KeyVault.SelectiveKeyRestoreOperationParameters", + "azure.keyvault.administration._generated.models.Setting": "KeyVault.Setting", + "azure.keyvault.administration._generated.models.SettingsListResult": "KeyVault.SettingsListResult", + "azure.keyvault.administration._generated.models.UpdateSettingRequest": "KeyVault.UpdateSettingRequest", + "azure.keyvault.administration._generated.models.RoleDefinitionType": "KeyVault.RoleDefinitionType", + "azure.keyvault.administration._generated.models.RoleType": "KeyVault.RoleType", + "azure.keyvault.administration._generated.models.DataAction": "KeyVault.DataAction", + 
"azure.keyvault.administration._generated.models.RoleScope": "KeyVault.RoleScope", + "azure.keyvault.administration._generated.models.OperationStatus": "KeyVault.OperationStatus", + "azure.keyvault.administration._generated.models.SettingTypeEnum": "KeyVault.SettingTypeEnum", + "azure.keyvault.administration._generated.operations.RoleDefinitionsOperations.delete": "KeyVault.RoleDefinitions.delete", + "azure.keyvault.administration._generated.aio.operations.RoleDefinitionsOperations.delete": "KeyVault.RoleDefinitions.delete", + "azure.keyvault.administration._generated.operations.RoleDefinitionsOperations.create_or_update": "KeyVault.RoleDefinitions.createOrUpdate", + "azure.keyvault.administration._generated.aio.operations.RoleDefinitionsOperations.create_or_update": "KeyVault.RoleDefinitions.createOrUpdate", + "azure.keyvault.administration._generated.operations.RoleDefinitionsOperations.get": "KeyVault.RoleDefinitions.get", + "azure.keyvault.administration._generated.aio.operations.RoleDefinitionsOperations.get": "KeyVault.RoleDefinitions.get", + "azure.keyvault.administration._generated.operations.RoleDefinitionsOperations.list": "KeyVault.RoleDefinitions.list", + "azure.keyvault.administration._generated.aio.operations.RoleDefinitionsOperations.list": "KeyVault.RoleDefinitions.list", + "azure.keyvault.administration._generated.operations.RoleAssignmentsOperations.delete": "KeyVault.RoleAssignments.delete", + "azure.keyvault.administration._generated.aio.operations.RoleAssignmentsOperations.delete": "KeyVault.RoleAssignments.delete", + "azure.keyvault.administration._generated.operations.RoleAssignmentsOperations.create": "KeyVault.RoleAssignments.create", + "azure.keyvault.administration._generated.aio.operations.RoleAssignmentsOperations.create": "KeyVault.RoleAssignments.create", + "azure.keyvault.administration._generated.operations.RoleAssignmentsOperations.get": "KeyVault.RoleAssignments.get", + 
"azure.keyvault.administration._generated.aio.operations.RoleAssignmentsOperations.get": "KeyVault.RoleAssignments.get", + "azure.keyvault.administration._generated.operations.RoleAssignmentsOperations.list_for_scope": "KeyVault.RoleAssignments.listForScope", + "azure.keyvault.administration._generated.aio.operations.RoleAssignmentsOperations.list_for_scope": "KeyVault.RoleAssignments.listForScope", + "azure.keyvault.administration._generated.KeyVaultClient.full_backup_status": "KeyVault.fullBackupStatus", + "azure.keyvault.administration._generated.aio.KeyVaultClient.full_backup_status": "KeyVault.fullBackupStatus", + "azure.keyvault.administration._generated.KeyVaultClient.begin_full_backup": "KeyVault.fullBackup", + "azure.keyvault.administration._generated.aio.KeyVaultClient.begin_full_backup": "KeyVault.fullBackup", + "azure.keyvault.administration._generated.KeyVaultClient.begin_pre_full_backup": "KeyVault.preFullBackup", + "azure.keyvault.administration._generated.aio.KeyVaultClient.begin_pre_full_backup": "KeyVault.preFullBackup", + "azure.keyvault.administration._generated.KeyVaultClient.restore_status": "KeyVault.restoreStatus", + "azure.keyvault.administration._generated.aio.KeyVaultClient.restore_status": "KeyVault.restoreStatus", + "azure.keyvault.administration._generated.KeyVaultClient.begin_full_restore_operation": "KeyVault.fullRestoreOperation", + "azure.keyvault.administration._generated.aio.KeyVaultClient.begin_full_restore_operation": "KeyVault.fullRestoreOperation", + "azure.keyvault.administration._generated.KeyVaultClient.begin_pre_full_restore_operation": "KeyVault.preFullRestoreOperation", + "azure.keyvault.administration._generated.aio.KeyVaultClient.begin_pre_full_restore_operation": "KeyVault.preFullRestoreOperation", + "azure.keyvault.administration._generated.KeyVaultClient.selective_key_restore_status": "KeyVault.selectiveKeyRestoreStatus", + "azure.keyvault.administration._generated.aio.KeyVaultClient.selective_key_restore_status": 
"KeyVault.selectiveKeyRestoreStatus", + "azure.keyvault.administration._generated.KeyVaultClient.begin_selective_key_restore_operation": "KeyVault.selectiveKeyRestoreOperation", + "azure.keyvault.administration._generated.aio.KeyVaultClient.begin_selective_key_restore_operation": "KeyVault.selectiveKeyRestoreOperation", + "azure.keyvault.administration._generated.KeyVaultClient.update_setting": "KeyVault.updateSetting", + "azure.keyvault.administration._generated.aio.KeyVaultClient.update_setting": "KeyVault.updateSetting", + "azure.keyvault.administration._generated.KeyVaultClient.get_setting": "KeyVault.getSetting", + "azure.keyvault.administration._generated.aio.KeyVaultClient.get_setting": "KeyVault.getSetting", + "azure.keyvault.administration._generated.KeyVaultClient.get_settings": "KeyVault.getSettings", + "azure.keyvault.administration._generated.aio.KeyVaultClient.get_settings": "KeyVault.getSettings" + } +} \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-administration/azure/__init__.py b/sdk/keyvault/azure-keyvault-administration/azure/__init__.py index 679ab6995134..d55ccad1f573 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/__init__.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/__init__.py @@ -1,5 +1 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/__init__.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/__init__.py index 679ab6995134..d55ccad1f573 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/__init__.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/__init__.py @@ -1,5 +1 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. 
-# ------------------------------------ __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_access_control_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_access_control_client.py index f41733a1a07a..390fcaf0c4ad 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_access_control_client.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_access_control_client.py @@ -67,15 +67,10 @@ def create_role_assignment( assignment_name = name or uuid4() create_parameters = RoleAssignmentCreateParameters( - properties=RoleAssignmentProperties( - principal_id=principal_id, role_definition_id=str(definition_id) - ) + properties=RoleAssignmentProperties(principal_id=principal_id, role_definition_id=str(definition_id)) ) assignment = self._client.role_assignments.create( - scope=scope, - role_assignment_name=str(assignment_name), - parameters=create_parameters, - **kwargs + scope=scope, role_assignment_name=str(assignment_name), parameters=create_parameters, **kwargs ) return KeyVaultRoleAssignment._from_generated(assignment) @@ -95,9 +90,7 @@ def delete_role_assignment( :rtype: None """ try: - self._client.role_assignments.delete( - scope=scope, role_assignment_name=str(name), **kwargs - ) + self._client.role_assignments.delete(scope=scope, role_assignment_name=str(name), **kwargs) except ResourceNotFoundError: pass @@ -116,9 +109,7 @@ def get_role_assignment( :returns: The fetched role assignment. 
:rtype: ~azure.keyvault.administration.KeyVaultRoleAssignment """ - assignment = self._client.role_assignments.get( - scope=scope, role_assignment_name=str(name), **kwargs - ) + assignment = self._client.role_assignments.get(scope=scope, role_assignment_name=str(name), **kwargs) return KeyVaultRoleAssignment._from_generated(assignment) @distributed_trace @@ -135,9 +126,7 @@ def list_role_assignments( :rtype: ~azure.core.paging.ItemPaged[~azure.keyvault.administration.KeyVaultRoleAssignment] """ return self._client.role_assignments.list_for_scope( - scope=scope, - cls=lambda result: [KeyVaultRoleAssignment._from_generated(a) for a in result], - **kwargs + scope=scope, cls=lambda result: [KeyVaultRoleAssignment._from_generated(a) for a in result], **kwargs ) @distributed_trace @@ -198,10 +187,7 @@ def set_role_definition( parameters = RoleDefinitionCreateParameters(properties=properties) definition = self._client.role_definitions.create_or_update( - scope=scope, - role_definition_name=str(name or uuid4()), - parameters=parameters, - **kwargs + scope=scope, role_definition_name=str(name or uuid4()), parameters=parameters, **kwargs ) return KeyVaultRoleDefinition._from_generated(definition) @@ -220,9 +206,7 @@ def get_role_definition( :returns: The fetched role definition. 
:rtype: ~azure.keyvault.administration.KeyVaultRoleDefinition """ - definition = self._client.role_definitions.get( - scope=scope, role_definition_name=str(name), **kwargs - ) + definition = self._client.role_definitions.get(scope=scope, role_definition_name=str(name), **kwargs) return KeyVaultRoleDefinition._from_generated(definition) @distributed_trace @@ -241,9 +225,7 @@ def delete_role_definition( :rtype: None """ try: - self._client.role_definitions.delete( - scope=scope, role_definition_name=str(name), **kwargs - ) + self._client.role_definitions.delete(scope=scope, role_definition_name=str(name), **kwargs) except ResourceNotFoundError: pass @@ -261,9 +243,7 @@ def list_role_definitions( :rtype: ~azure.core.paging.ItemPaged[~azure.keyvault.administration.KeyVaultRoleDefinition] """ return self._client.role_definitions.list( - scope=scope, - cls=lambda result: [KeyVaultRoleDefinition._from_generated(d) for d in result], - **kwargs + scope=scope, cls=lambda result: [KeyVaultRoleDefinition._from_generated(d) for d in result], **kwargs ) def __enter__(self) -> "KeyVaultAccessControlClient": diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_backup_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_backup_client.py index b6b5a9ecf04e..898770ae2647 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_backup_client.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_backup_client.py @@ -101,9 +101,7 @@ def _use_continuation_token(self, continuation_token: str, status_method: Callab + "operation poller's continuation_token() method" ) from ex - pipeline_response = status_method( - job_id=job_id, cls=lambda pipeline_response, _, __: pipeline_response - ) + pipeline_response = status_method(job_id=job_id, cls=lambda pipeline_response, _, __: pipeline_response) if "azure-asyncoperation" not in pipeline_response.http_response.headers: 
pipeline_response.http_response.headers["azure-asyncoperation"] = status_url return _get_continuation_token(pipeline_response) @@ -116,8 +114,7 @@ def begin_backup( use_managed_identity: Literal[True], continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[KeyVaultBackupResult]: - ... + ) -> LROPoller[KeyVaultBackupResult]: ... @overload def begin_backup( @@ -127,8 +124,7 @@ def begin_backup( sas_token: str, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[KeyVaultBackupResult]: - ... + ) -> LROPoller[KeyVaultBackupResult]: ... # Disabling pylint checks because they don't correctly handle overloads @distributed_trace @@ -195,8 +191,7 @@ def begin_restore( key_name: Optional[str] = None, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[None]: - ... + ) -> LROPoller[None]: ... @overload def begin_restore( @@ -207,8 +202,7 @@ def begin_restore( key_name: Optional[str] = None, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[None]: - ... + ) -> LROPoller[None]: ... # Disabling pylint checks because they don't correctly handle overloads @distributed_trace @@ -300,8 +294,7 @@ def begin_pre_backup( use_managed_identity: Literal[True], continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[None]: - ... + ) -> LROPoller[None]: ... @overload def begin_pre_backup( @@ -311,8 +304,7 @@ def begin_pre_backup( sas_token: str, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[None]: - ... + ) -> LROPoller[None]: ... @distributed_trace def begin_pre_backup( # pylint: disable=docstring-keyword-should-match-keyword-only @@ -368,8 +360,7 @@ def begin_pre_restore( use_managed_identity: Literal[True], continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[None]: - ... + ) -> LROPoller[None]: ... 
@overload def begin_pre_restore( @@ -379,8 +370,7 @@ def begin_pre_restore( sas_token: str, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[None]: - ... + ) -> LROPoller[None]: ... @distributed_trace def begin_pre_restore( # pylint: disable=docstring-keyword-should-match-keyword-only diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_client.py index bc155465c5f3..0a83e4c8f786 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_client.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_client.py @@ -16,15 +16,15 @@ from ._configuration import KeyVaultClientConfiguration from ._utils.serialization import Deserializer, Serializer -from .operations import KeyVaultClientOperationsMixin, RoleAssignmentsOperations, RoleDefinitionsOperations +from .operations import RoleAssignmentsOperations, RoleDefinitionsOperations, _KeyVaultClientOperationsMixin if TYPE_CHECKING: from azure.core.credentials import TokenCredential -class KeyVaultClient(KeyVaultClientOperationsMixin): - """The key vault client performs cryptographic key operations and vault operations against the Key - Vault service. +class KeyVaultClient(_KeyVaultClientOperationsMixin): + """The Azure Key Vault Administration service client performs administrative operations including + RBAC, BackupRestore, and settings management against the Azure Key Vault service. :ivar role_definitions: RoleDefinitionsOperations operations :vartype role_definitions: @@ -32,12 +32,14 @@ class KeyVaultClient(KeyVaultClientOperationsMixin): :ivar role_assignments: RoleAssignmentsOperations operations :vartype role_assignments: azure.keyvault.administration._generated.operations.RoleAssignmentsOperations - :param vault_base_url: Required. 
+ :param vault_base_url: The base URL of the Key Vault instance (e.g. + `https://myvault.vault.azure.net/ `_). Required. :type vault_base_url: str :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials.TokenCredential - :keyword api_version: The API version to use for this operation. Default value is "7.6". Note - that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-07-01". + Default value is "2025-07-01". Note that overriding this default value may result in + unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_configuration.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_configuration.py index 21e36dd5a12e..1e7442eedff6 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_configuration.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_configuration.py @@ -22,17 +22,19 @@ class KeyVaultClientConfiguration: # pylint: disable=too-many-instance-attribut Note that all parameters used to create this instance are saved as instance attributes. - :param vault_base_url: Required. + :param vault_base_url: The base URL of the Key Vault instance (e.g. + `https://myvault.vault.azure.net/ `_). Required. :type vault_base_url: str :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials.TokenCredential - :keyword api_version: The API version to use for this operation. Default value is "7.6". Note - that overriding this default value may result in unsupported behavior. 
+ :keyword api_version: The API version to use for this operation. Known values are "2025-07-01". + Default value is "2025-07-01". Note that overriding this default value may result in + unsupported behavior. :paramtype api_version: str """ def __init__(self, vault_base_url: str, credential: "TokenCredential", **kwargs: Any) -> None: - api_version: str = kwargs.pop("api_version", "7.6") + api_version: str = kwargs.pop("api_version", "2025-07-01") if vault_base_url is None: raise ValueError("Parameter 'vault_base_url' must not be None.") diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_utils/model_base.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_utils/model_base.py index 49d5c7259389..db24930fdca9 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_utils/model_base.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_utils/model_base.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -29,6 +29,7 @@ from azure.core import CaseInsensitiveEnumMeta from azure.core.pipeline import PipelineResponse from azure.core.serialization import _Null +from azure.core.rest import HttpResponse _LOGGER = logging.getLogger(__name__) @@ -36,6 +37,7 @@ TZ_UTC = timezone.utc _T = typing.TypeVar("_T") +_NONE_TYPE = type(None) def _timedelta_as_isostr(td: timedelta) -> str: @@ -170,6 +172,21 @@ def default(self, o): # pylint: disable=too-many-return-statements r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" ) +_ARRAY_ENCODE_MAPPING = { + "pipeDelimited": "|", + "spaceDelimited": " ", + "commaDelimited": ",", + "newlineDelimited": "\n", +} + + +def _deserialize_array_encoded(delimit: str, attr): + if isinstance(attr, str): + if attr == "": + return [] + return attr.split(delimit) + return attr + def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: """Deserialize ISO-8601 formatted string into Datetime object. @@ -201,7 +218,7 @@ def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: test_utc = date_obj.utctimetuple() if test_utc.tm_year > 9999 or test_utc.tm_year < 1: raise OverflowError("Hit max or min date") - return date_obj + return date_obj # type: ignore[no-any-return] def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: @@ -255,7 +272,7 @@ def _deserialize_time(attr: typing.Union[str, time]) -> time: """ if isinstance(attr, time): return attr - return isodate.parse_time(attr) + return isodate.parse_time(attr) # type: ignore[no-any-return] def _deserialize_bytes(attr): @@ -314,6 +331,8 @@ def _deserialize_int_as_str(attr): def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): if annotation is int and rf and rf._format == "str": return _deserialize_int_as_str + if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING: + return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format]) if rf 
and rf._format: return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore @@ -345,16 +364,46 @@ def _get_model(module_name: str, model_name: str): class _MyMutableMapping(MutableMapping[str, typing.Any]): - def __init__(self, data: typing.Dict[str, typing.Any]) -> None: + def __init__(self, data: dict[str, typing.Any]) -> None: self._data = data def __contains__(self, key: typing.Any) -> bool: return key in self._data def __getitem__(self, key: str) -> typing.Any: + # If this key has been deserialized (for mutable types), we need to handle serialization + if hasattr(self, "_attr_to_rest_field"): + cache_attr = f"_deserialized_{key}" + if hasattr(self, cache_attr): + rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key) + if rf: + value = self._data.get(key) + if isinstance(value, (dict, list, set)): + # For mutable types, serialize and return + # But also update _data with serialized form and clear flag + # so mutations via this returned value affect _data + serialized = _serialize(value, rf._format) + # If serialized form is same type (no transformation needed), + # return _data directly so mutations work + if isinstance(serialized, type(value)) and serialized == value: + return self._data.get(key) + # Otherwise return serialized copy and clear flag + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + # Store serialized form back + self._data[key] = serialized + return serialized return self._data.__getitem__(key) def __setitem__(self, key: str, value: typing.Any) -> None: + # Clear any cached deserialized value when setting through dictionary access + cache_attr = f"_deserialized_{key}" + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass self._data.__setitem__(key, value) def __delitem__(self, key: str) -> None: @@ -425,7 +474,7 @@ def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: return self._data.pop(key) return 
self._data.pop(key, default) - def popitem(self) -> typing.Tuple[str, typing.Any]: + def popitem(self) -> tuple[str, typing.Any]: """ Removes and returns some (key, value) pair :returns: The (key, value) pair. @@ -466,6 +515,8 @@ def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: return self._data.setdefault(key, default) def __eq__(self, other: typing.Any) -> bool: + if isinstance(other, _MyMutableMapping): + return self._data == other._data try: other_model = self.__class__(other) except Exception: @@ -482,6 +533,8 @@ def _is_model(obj: typing.Any) -> bool: def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements if isinstance(o, list): + if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o): + return _ARRAY_ENCODE_MAPPING[format].join(o) return [_serialize(x, format) for x in o] if isinstance(o, dict): return {k: _serialize(v, format) for k, v in o.items()} @@ -513,9 +566,7 @@ def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-m return o -def _get_rest_field( - attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str -) -> typing.Optional["_RestField"]: +def _get_rest_field(attr_to_rest_field: dict[str, "_RestField"], rest_name: str) -> typing.Optional["_RestField"]: try: return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) except StopIteration: @@ -538,7 +589,7 @@ class Model(_MyMutableMapping): _is_model = True # label whether current class's _attr_to_rest_field has been calculated # could not see _attr_to_rest_field directly because subclass inherits it from parent class - _calculated: typing.Set[str] = set() + _calculated: set[str] = set() def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: class_name = self.__class__.__name__ @@ -549,54 +600,9 @@ def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: for rest_field in self._attr_to_rest_field.values() if 
rest_field._default is not _UNSET } - if args: # pylint: disable=too-many-nested-blocks + if args: if isinstance(args[0], ET.Element): - existed_attr_keys = [] - model_meta = getattr(self, "_xml", {}) - - for rf in self._attr_to_rest_field.values(): - prop_meta = getattr(rf, "_xml", {}) - xml_name = prop_meta.get("name", rf._rest_name) - xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) - if xml_ns: - xml_name = "{" + xml_ns + "}" + xml_name - - # attribute - if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: - existed_attr_keys.append(xml_name) - dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) - continue - - # unwrapped element is array - if prop_meta.get("unwrapped", False): - # unwrapped array could either use prop items meta/prop meta - if prop_meta.get("itemsName"): - xml_name = prop_meta.get("itemsName") - xml_ns = prop_meta.get("itemNs") - if xml_ns: - xml_name = "{" + xml_ns + "}" + xml_name - items = args[0].findall(xml_name) # pyright: ignore - if len(items) > 0: - existed_attr_keys.append(xml_name) - dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) - continue - - # text element is primitive type - if prop_meta.get("text", False): - if args[0].text is not None: - dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) - continue - - # wrapped element could be normal property or array, it should only have one element - item = args[0].find(xml_name) - if item is not None: - existed_attr_keys.append(xml_name) - dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) - - # rest thing is additional properties - for e in args[0]: - if e.tag not in existed_attr_keys: - dict_to_pass[e.tag] = _convert_element(e) + dict_to_pass.update(self._init_from_xml(args[0])) else: dict_to_pass.update( {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} @@ -615,6 +621,69 @@ def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: ) 
super().__init__(dict_to_pass) + def _init_from_xml(self, element: ET.Element) -> dict[str, typing.Any]: + """Deserialize an XML element into a dict mapping rest field names to values. + + :param ET.Element element: The XML element to deserialize from. + :returns: A dictionary of rest_name to deserialized value pairs. + :rtype: dict + """ + result: dict[str, typing.Any] = {} + model_meta = getattr(self, "_xml", {}) + existed_attr_keys: list[str] = [] + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = _resolve_xml_ns(prop_meta, model_meta) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and element.get(xml_name) is not None: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = _deserialize(rf._type, element.get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + _items_name = prop_meta.get("itemsName") + if _items_name: + xml_name = _items_name + _items_ns = prop_meta.get("itemsNs") + if _items_ns is not None: + xml_ns = _items_ns + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = element.findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = _deserialize(rf._type, items) + elif not rf._is_optional: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = [] + continue + + # text element is primitive type + if prop_meta.get("text", False): + if element.text is not None: + result[rf._rest_name] = _deserialize(rf._type, element.text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = element.find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional 
properties + for e in element: + if e.tag not in existed_attr_keys: + result[e.tag] = _convert_element(e) + + return result + def copy(self) -> "Model": return Model(self.__dict__) @@ -623,7 +692,7 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order - attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property + attr_to_rest_field: dict[str, _RestField] = { # map attribute name to rest_field property k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") } annotations = { @@ -638,7 +707,7 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) if not rf._rest_name_input: rf._rest_name_input = attr - cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items()) cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") return super().__new__(cls) @@ -667,7 +736,7 @@ def _deserialize(cls, data, exist_discriminators): model_meta = getattr(cls, "_xml", {}) prop_meta = getattr(discriminator, "_xml", {}) xml_name = prop_meta.get("name", discriminator._rest_name) - xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + xml_ns = _resolve_xml_ns(prop_meta, model_meta) if xml_ns: xml_name = "{" + xml_ns + "}" + xml_name @@ -680,7 +749,7 @@ def _deserialize(cls, data, exist_discriminators): mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member return mapped_cls._deserialize(data, exist_discriminators) - def as_dict(self, *, exclude_readonly: bool = False) -> 
typing.Dict[str, typing.Any]: + def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]: """Return a dict that can be turned into json using json.dump. :keyword bool exclude_readonly: Whether to remove the readonly properties. @@ -740,7 +809,7 @@ def _deserialize_with_union(deserializers, obj): def _deserialize_dict( value_deserializer: typing.Optional[typing.Callable], module: typing.Optional[str], - obj: typing.Dict[typing.Any, typing.Any], + obj: dict[typing.Any, typing.Any], ): if obj is None: return obj @@ -750,7 +819,7 @@ def _deserialize_dict( def _deserialize_multiple_sequence( - entry_deserializers: typing.List[typing.Optional[typing.Callable]], + entry_deserializers: list[typing.Optional[typing.Callable]], module: typing.Optional[str], obj, ): @@ -759,6 +828,14 @@ def _deserialize_multiple_sequence( return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) +def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool: + return ( + isinstance(deserializer, functools.partial) + and isinstance(deserializer.args[0], functools.partial) + and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable + ) + + def _deserialize_sequence( deserializer: typing.Optional[typing.Callable], module: typing.Optional[str], @@ -768,17 +845,30 @@ def _deserialize_sequence( return obj if isinstance(obj, ET.Element): obj = list(obj) + + # encoded string may be deserialized to sequence + if isinstance(obj, str) and isinstance(deserializer, functools.partial): + # for list[str] + if _is_array_encoded_deserializer(deserializer): + return deserializer(obj) + + # for list[Union[...]] + if isinstance(deserializer.args[0], list): + for sub_deserializer in deserializer.args[0]: + if _is_array_encoded_deserializer(sub_deserializer): + return sub_deserializer(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) -def 
_sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.Any]: +def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]: return sorted( types, key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), ) -def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches annotation: typing.Any, module: typing.Optional[str], rf: typing.Optional["_RestField"] = None, @@ -818,16 +908,18 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur # is it optional? try: - if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if any(a is _NONE_TYPE for a in annotation.__args__): # pyright: ignore + if rf: + rf._is_optional = True if len(annotation.__args__) <= 2: # pyright: ignore if_obj_deserializer = _get_deserialize_callable_from_annotation( - next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf # pyright: ignore ) return functools.partial(_deserialize_with_optional, if_obj_deserializer) # the type is Optional[Union[...]], we need to remove the None type from the Union annotation_copy = copy.copy(annotation) - annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE] # pyright: ignore return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) except AttributeError: pass @@ -843,7 +935,10 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur return functools.partial(_deserialize_with_union, deserializers) try: - if annotation._name == "Dict": # pyright: ignore + annotation_name = ( + 
def _failsafe_deserialize(
    deserializer: typing.Any,
    response: HttpResponse,
    module: typing.Optional[str] = None,
    rf: typing.Optional["_RestField"] = None,
    format: typing.Optional[str] = None,
) -> typing.Any:
    """Best-effort deserialization of an HTTP response's JSON body.

    Both body parsing (``response.json()``) and model deserialization happen
    inside the guard: any failure is logged and swallowed, and ``None`` is
    returned instead of raising.

    :param any deserializer: The deserializer (type/callable) to apply.
    :param response: The HTTP response whose JSON body is deserialized.
    :type response: ~azure.core.rest.HttpResponse
    :param str module: Optional module name used for annotation resolution.
    :param rf: Optional rest field providing formatting metadata.
    :param str format: Optional serialization format hint.
    :returns: The deserialized object, or None on any error.
    """
    try:
        payload = response.json()
        return _deserialize(deserializer, payload, module, rf, format)
    except Exception:  # pylint: disable=broad-except
        _LOGGER.warning(
            "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
        )
    return None
getattr(self._type, "args", [None])[0] + result = getattr(self._type, "args", [None])[0] + # type may be wrapped by nested functools.partial so we need to check for that + if isinstance(result, functools.partial): + return getattr(result, "args", [None])[0] + return result @property def _rest_name(self) -> str: @@ -1004,14 +1112,37 @@ def _rest_name(self) -> str: def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin # by this point, type and rest_name will have a value bc we default # them in __new__ of the Model class - item = obj.get(self._rest_name) + # Use _data.get() directly to avoid triggering __getitem__ which clears the cache + item = obj._data.get(self._rest_name) if item is None: return item if self._is_model: return item - return _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, we want mutations to directly affect _data + # Check if we've already deserialized this value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + # Return the value from _data directly (it's been deserialized in place) + return obj._data.get(self._rest_name) + + deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, store the deserialized value back in _data + # so mutations directly affect _data + if isinstance(deserialized, (dict, list, set)): + obj._data[self._rest_name] = deserialized + object.__setattr__(obj, cache_attr, True) # Mark as deserialized + return deserialized + + return deserialized def __set__(self, obj: Model, value) -> None: + # Clear the cached deserialized object when setting a new value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + object.__delattr__(obj, cache_attr) + if value is None: # we want to wipe out entries if users set attr to None try: @@ -1036,11 +1167,11 @@ def rest_field( *, name: typing.Optional[str] = None, type: typing.Optional[typing.Callable] = None, # 
pylint: disable=redefined-builtin - visibility: typing.Optional[typing.List[str]] = None, + visibility: typing.Optional[list[str]] = None, default: typing.Any = _UNSET, format: typing.Optional[str] = None, is_multipart_file_input: bool = False, - xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, ) -> typing.Any: return _RestField( name=name, @@ -1057,8 +1188,8 @@ def rest_discriminator( *, name: typing.Optional[str] = None, type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin - visibility: typing.Optional[typing.List[str]] = None, - xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + visibility: typing.Optional[list[str]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, ) -> typing.Any: return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) @@ -1074,21 +1205,77 @@ def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore +def _get_xml_ns(meta: dict[str, typing.Any]) -> typing.Optional[str]: + """Return the XML namespace from a metadata dict, checking both 'ns' (old-style) and 'namespace' (DPG) keys. + + :param dict meta: The metadata dictionary to extract namespace from. + :returns: The namespace string if 'ns' or 'namespace' key is present, None otherwise. + :rtype: str or None + """ + ns = meta.get("ns") + if ns is None: + ns = meta.get("namespace") + return ns + + +def _resolve_xml_ns( + prop_meta: dict[str, typing.Any], model_meta: typing.Optional[dict[str, typing.Any]] = None +) -> typing.Optional[str]: + """Resolve XML namespace for a property, falling back to model namespace when appropriate. + + Checks the property metadata first; if no namespace is found and the model does not declare + an explicit prefix, falls back to the model-level namespace. 
def _set_xml_attribute(element: ET.Element, name: str, value: typing.Any, prop_meta: dict[str, typing.Any]) -> None:
    """Set an XML attribute on an element, handling namespace prefix registration.

    :param ET.Element element: The element to set the attribute on.
    :param str name: The default attribute name (wire name), used when metadata has no 'name'.
    :param any value: The attribute value; converted to its primitive string form.
    :param dict prop_meta: The property metadata dictionary ('name', 'ns'/'namespace', 'prefix').
    """
    attr_name = prop_meta.get("name", name)
    namespace = _get_xml_ns(prop_meta)
    if namespace:
        prefix = prop_meta.get("prefix")
        if prefix:
            # Register so serialization emits the declared prefix rather than
            # an auto-generated one.
            _safe_register_namespace(prefix, namespace)
        attr_name = "{" + namespace + "}" + attr_name
    element.set(attr_name, _get_primitive_type_value(value))
+ _items_name = parent_meta.get("itemsName") if parent_meta is not None else None + element_name = _items_name if _items_name else (model_meta.get("name") or o.__class__.__name__) + _model_ns = _get_xml_ns(model_meta) wrapped_element = _create_xml_element( - model_meta.get("name", o.__class__.__name__), + element_name, model_meta.get("prefix"), - model_meta.get("ns"), + _model_ns, ) readonly_props = [] @@ -1110,7 +1297,9 @@ def _get_element( # additional properties will not have rest field, use the wire name as xml name prop_meta = {"name": k} - # if no ns for prop, use model's + # Propagate model namespace to properties only for old-style "ns"-keyed models. + # DPG-generated models use the "namespace" key and explicitly declare namespace on + # each property that needs it, so propagation is intentionally skipped for them. if prop_meta.get("ns") is None and model_meta.get("ns"): prop_meta["ns"] = model_meta.get("ns") prop_meta["prefix"] = model_meta.get("prefix") @@ -1122,12 +1311,7 @@ def _get_element( # text could only set on primitive type wrapped_element.text = _get_primitive_type_value(v) elif prop_meta.get("attribute", False): - xml_name = prop_meta.get("name", k) - if prop_meta.get("ns"): - ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore - xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore - # attribute should be primitive type - wrapped_element.set(xml_name, _get_primitive_type_value(v)) + _set_xml_attribute(wrapped_element, k, v, prop_meta) else: # other wrapped prop element wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) @@ -1136,6 +1320,7 @@ def _get_element( return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore if isinstance(o, dict): result = [] + _dict_ns = _get_xml_ns(parent_meta) if parent_meta else None for k, v in o.items(): result.append( _get_wrapped_element( @@ -1143,7 +1328,7 @@ def _get_element( exclude_readonly, { "name": k, - 
"ns": parent_meta.get("ns") if parent_meta else None, + "ns": _dict_ns, "prefix": parent_meta.get("prefix") if parent_meta else None, }, ) @@ -1152,13 +1337,16 @@ def _get_element( # primitive case need to create element based on parent_meta if parent_meta: + _items_ns = parent_meta.get("itemsNs") + if _items_ns is None: + _items_ns = _get_xml_ns(parent_meta) return _get_wrapped_element( o, exclude_readonly, { "name": parent_meta.get("itemsName", parent_meta.get("name")), "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), - "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + "ns": _items_ns, }, ) @@ -1168,10 +1356,11 @@ def _get_element( def _get_wrapped_element( v: typing.Any, exclude_readonly: bool, - meta: typing.Optional[typing.Dict[str, typing.Any]], + meta: typing.Optional[dict[str, typing.Any]], ) -> ET.Element: + _meta_ns = _get_xml_ns(meta) if meta else None wrapped_element = _create_xml_element( - meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + meta.get("name") if meta else None, meta.get("prefix") if meta else None, _meta_ns ) if isinstance(v, (dict, list)): wrapped_element.extend(_get_element(v, exclude_readonly, meta)) @@ -1179,7 +1368,7 @@ def _get_wrapped_element( _get_element(v, exclude_readonly, meta, wrapped_element) else: wrapped_element.text = _get_primitive_type_value(v) - return wrapped_element + return wrapped_element # type: ignore[no-any-return] def _get_primitive_type_value(v) -> str: @@ -1192,9 +1381,29 @@ def _get_primitive_type_value(v) -> str: return str(v) -def _create_xml_element(tag, prefix=None, ns=None): - if prefix and ns: +def _safe_register_namespace(prefix: str, ns: str) -> None: + """Register an XML namespace prefix, handling reserved prefix patterns. + + Some prefixes (e.g. 'ns2') match Python's reserved 'ns\\d+' pattern used for + auto-generated prefixes, causing register_namespace to raise ValueError. 
+ Falls back to directly registering in the internal namespace map. + + :param str prefix: The namespace prefix to register. + :param str ns: The namespace URI. + """ + try: ET.register_namespace(prefix, ns) + except ValueError: + _ns_map = getattr(ET, "_namespace_map", None) + if _ns_map is not None: + _ns_map[ns] = prefix + + +def _create_xml_element( + tag: typing.Any, prefix: typing.Optional[str] = None, ns: typing.Optional[str] = None +) -> ET.Element: + if prefix and ns: + _safe_register_namespace(prefix, ns) if ns: return ET.Element("{" + ns + "}" + tag) return ET.Element(tag) @@ -1211,7 +1420,7 @@ def _deserialize_xml( def _convert_element(e: ET.Element): # dict case if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: - dict_result: typing.Dict[str, typing.Any] = {} + dict_result: dict[str, typing.Any] = {} for child in e: if dict_result.get(child.tag) is not None: if isinstance(dict_result[child.tag], list): @@ -1224,7 +1433,7 @@ def _convert_element(e: ET.Element): return dict_result # array case if len(e) > 0: - array_result: typing.List[typing.Any] = [] + array_result: list[typing.Any] = [] for child in e: array_result.append(_convert_element(child)) return array_result diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_utils/serialization.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_utils/serialization.py index eb86ea23c965..81ec1de5922b 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_utils/serialization.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_utils/serialization.py @@ -21,7 +21,6 @@ import sys import codecs from typing import ( - Dict, Any, cast, Optional, @@ -31,7 +30,6 @@ Mapping, Callable, MutableMapping, - List, ) try: @@ -229,12 +227,12 @@ class Model: serialization and deserialization. 
""" - _subtype_map: Dict[str, Dict[str, Any]] = {} - _attribute_map: Dict[str, Dict[str, Any]] = {} - _validation: Dict[str, Dict[str, Any]] = {} + _subtype_map: dict[str, dict[str, Any]] = {} + _attribute_map: dict[str, dict[str, Any]] = {} + _validation: dict[str, dict[str, Any]] = {} def __init__(self, **kwargs: Any) -> None: - self.additional_properties: Optional[Dict[str, Any]] = {} + self.additional_properties: Optional[dict[str, Any]] = {} for k in kwargs: # pylint: disable=consider-using-dict-items if k not in self._attribute_map: _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) @@ -311,7 +309,7 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: def as_dict( self, keep_readonly: bool = True, - key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer, **kwargs: Any ) -> JSON: """Return a dict that can be serialized using json.dump. @@ -380,7 +378,7 @@ def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: def from_dict( cls, data: Any, - key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None, content_type: Optional[str] = None, ) -> Self: """Parse a dict using given key extractor return a model. 
@@ -414,7 +412,7 @@ def _flatten_subtype(cls, key, objects): return {} result = dict(cls._subtype_map[key]) for valuetype in cls._subtype_map[key].values(): - result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access + result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access return result @classmethod @@ -528,7 +526,7 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: "[]": self.serialize_iter, "{}": self.serialize_dict, } - self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.dependencies: dict[str, type] = dict(classes) if classes else {} self.key_transformer = full_restapi_key_transformer self.client_side_validation = True @@ -579,7 +577,7 @@ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, to if attr_name == "additional_properties" and attr_desc["key"] == "": if target_obj.additional_properties is not None: - serialized.update(target_obj.additional_properties) + serialized |= target_obj.additional_properties continue try: @@ -789,7 +787,7 @@ def serialize_data(self, data, data_type, **kwargs): # If dependencies is empty, try with current data class # It has to be a subclass of Enum anyway - enum_type = self.dependencies.get(data_type, data.__class__) + enum_type = self.dependencies.get(data_type, cast(type, data.__class__)) if issubclass(enum_type, Enum): return Serializer.serialize_enum(data, enum_obj=enum_type) @@ -823,13 +821,20 @@ def serialize_basic(cls, data, data_type, **kwargs): :param str data_type: Type of object in the iterable. :rtype: str, int, float, bool :return: serialized object + :raises TypeError: raise if data_type is not one of str, int, float, bool. 
""" custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(data) + if data_type == "float": + return float(data) + if data_type == "bool": + return bool(data) + raise TypeError("Unknown basic data type: {}".format(data_type)) @classmethod def serialize_unicode(cls, data): @@ -1184,7 +1189,7 @@ def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argumen while "." in key: # Need the cast, as for some reasons "split" is typed as list[str | Any] - dict_keys = cast(List[str], _FLATTEN.split(key)) + dict_keys = cast(list[str], _FLATTEN.split(key)) if len(dict_keys) == 1: key = _decode_attribute_map_key(dict_keys[0]) break @@ -1386,7 +1391,7 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: "duration": (isodate.Duration, datetime.timedelta), "iso-8601": (datetime.datetime), } - self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.dependencies: dict[str, type] = dict(classes) if classes else {} self.key_extractors = [rest_key_extractor, xml_key_extractor] # Additional properties only works if the "rest_key_extractor" is used to # extract the keys. Making it to work whatever the key extractor is too much @@ -1759,7 +1764,7 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return :param str data_type: deserialization data type. :return: Deserialized basic type. :rtype: str, int, float or bool - :raises TypeError: if string format is not valid. + :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool. """ # If we're here, data is supposed to be a basic type. 
# If it's still an XML node, take the text @@ -1785,7 +1790,11 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(attr) + if data_type == "float": + return float(attr) + raise TypeError("Unknown basic data type: {}".format(data_type)) @staticmethod def deserialize_unicode(data): diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_validation.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_validation.py index 752b2822f9d3..f5af3a4eb8a2 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_validation.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_validation.py @@ -10,6 +10,22 @@ def api_version_validation(**kwargs): params_added_on = kwargs.pop("params_added_on", {}) method_added_on = kwargs.pop("method_added_on", "") + api_versions_list = kwargs.pop("api_versions_list", []) + + def _index_with_default(value: str, default: int = -1) -> int: + """Get the index of value in lst, or return default if not found. + + :param value: The value to search for in the api_versions_list. + :type value: str + :param default: The default value to return if the value is not found. + :type default: int + :return: The index of the value in the list, or the default value if not found. 
+ :rtype: int + """ + try: + return api_versions_list.index(value) + except ValueError: + return default def decorator(func): @functools.wraps(func) @@ -21,7 +37,7 @@ def wrapper(*args, **kwargs): except AttributeError: return func(*args, **kwargs) - if method_added_on > client_api_version: + if _index_with_default(method_added_on) > _index_with_default(client_api_version): raise ValueError( f"'{func.__name__}' is not available in API version " f"{client_api_version}. Pass service API version {method_added_on} or newer to your client." @@ -31,7 +47,7 @@ def wrapper(*args, **kwargs): parameter: api_version for api_version, parameters in params_added_on.items() for parameter in parameters - if parameter in kwargs and api_version > client_api_version + if parameter in kwargs and _index_with_default(api_version) > _index_with_default(client_api_version) } if unsupported: raise ValueError( diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/_client.py index f6eec1768ab3..de369772c92e 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/_client.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/_client.py @@ -16,15 +16,15 @@ from .._utils.serialization import Deserializer, Serializer from ._configuration import KeyVaultClientConfiguration -from .operations import KeyVaultClientOperationsMixin, RoleAssignmentsOperations, RoleDefinitionsOperations +from .operations import RoleAssignmentsOperations, RoleDefinitionsOperations, _KeyVaultClientOperationsMixin if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential -class KeyVaultClient(KeyVaultClientOperationsMixin): - """The key vault client performs cryptographic key operations and vault operations against the Key - Vault service. 
+class KeyVaultClient(_KeyVaultClientOperationsMixin): + """The Azure Key Vault Administration service client performs administrative operations including + RBAC, BackupRestore, and settings management against the Azure Key Vault service. :ivar role_definitions: RoleDefinitionsOperations operations :vartype role_definitions: @@ -32,12 +32,14 @@ class KeyVaultClient(KeyVaultClientOperationsMixin): :ivar role_assignments: RoleAssignmentsOperations operations :vartype role_assignments: azure.keyvault.administration._generated.aio.operations.RoleAssignmentsOperations - :param vault_base_url: Required. + :param vault_base_url: The base URL of the Key Vault instance (e.g. + `https://myvault.vault.azure.net/ `_). Required. :type vault_base_url: str :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :keyword api_version: The API version to use for this operation. Default value is "7.6". Note - that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-07-01". + Default value is "2025-07-01". Note that overriding this default value may result in + unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/_configuration.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/_configuration.py index aac7f8b37d37..8048741ec595 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/_configuration.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/_configuration.py @@ -22,17 +22,19 @@ class KeyVaultClientConfiguration: # pylint: disable=too-many-instance-attribut Note that all parameters used to create this instance are saved as instance attributes. - :param vault_base_url: Required. + :param vault_base_url: The base URL of the Key Vault instance (e.g. + `https://myvault.vault.azure.net/ `_). Required. :type vault_base_url: str :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :keyword api_version: The API version to use for this operation. Default value is "7.6". Note - that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-07-01". + Default value is "2025-07-01". Note that overriding this default value may result in + unsupported behavior. 
:paramtype api_version: str """ def __init__(self, vault_base_url: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: - api_version: str = kwargs.pop("api_version", "7.6") + api_version: str = kwargs.pop("api_version", "2025-07-01") if vault_base_url is None: raise ValueError("Parameter 'vault_base_url' must not be None.") diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/__init__.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/__init__.py index 2318933b2c83..96a14545b22a 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/__init__.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/__init__.py @@ -14,7 +14,7 @@ from ._operations import RoleDefinitionsOperations # type: ignore from ._operations import RoleAssignmentsOperations # type: ignore -from ._operations import KeyVaultClientOperationsMixin # type: ignore +from ._operations import _KeyVaultClientOperationsMixin # type: ignore # pylint: disable=unused-import from ._patch import __all__ as _patch_all from ._patch import * @@ -23,7 +23,6 @@ __all__ = [ "RoleDefinitionsOperations", "RoleAssignmentsOperations", - "KeyVaultClientOperationsMixin", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/_operations.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/_operations.py index 814c54aa59b5..fb53df6115bc 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/_operations.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/_operations.py @@ -9,7 +9,7 @@ from 
collections.abc import MutableMapping from io import IOBase import json -from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload +from typing import Any, AsyncIterator, Callable, IO, Optional, TypeVar, Union, cast, overload import urllib.parse from azure.core import AsyncPipelineClient @@ -62,7 +62,8 @@ JSON = MutableMapping[str, Any] T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class RoleDefinitionsOperations: @@ -122,6 +123,7 @@ async def delete(self, scope: str, role_definition_name: str, **kwargs: Any) -> } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -136,11 +138,14 @@ async def delete(self, scope: str, role_definition_name: str, **kwargs: Any) -> except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.RoleDefinition, response.json()) @@ -296,6 +301,7 @@ async def create_or_update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -310,11 +316,14 @@ async def create_or_update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.RoleDefinition, response.json()) @@ -362,6 +371,7 @@ async def get(self, scope: str, role_definition_name: str, **kwargs: Any) -> _mo } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -376,11 +386,14 @@ async def get(self, scope: str, role_definition_name: str, **kwargs: Any) -> _mo except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.RoleDefinition, response.json()) @@ -392,7 +405,7 @@ async def get(self, scope: str, role_definition_name: str, **kwargs: Any) -> _mo @distributed_trace def list( self, scope: str, *, filter: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.RoleDefinition"]: + ) -> 
AsyncItemPaged["_models.RoleDefinition"]: """Get all role definitions that are applicable at scope and above. :param scope: The scope of the role definition. Required. @@ -459,7 +472,10 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.RoleDefinition], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.RoleDefinition], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -475,7 +491,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) return pipeline_response @@ -539,6 +558,7 @@ async def delete(self, scope: str, role_assignment_name: str, **kwargs: Any) -> } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -553,11 +573,14 @@ async def delete(self, scope: str, role_assignment_name: str, **kwargs: Any) -> except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress 
else response.iter_raw() else: deserialized = _deserialize(_models.RoleAssignment, response.json()) @@ -709,6 +732,7 @@ async def create( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -723,11 +747,14 @@ async def create( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.RoleAssignment, response.json()) @@ -775,6 +802,7 @@ async def get(self, scope: str, role_assignment_name: str, **kwargs: Any) -> _mo } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -789,11 +817,14 @@ async def get(self, scope: str, role_assignment_name: str, **kwargs: Any) -> _mo except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else 
response.iter_raw() else: deserialized = _deserialize(_models.RoleAssignment, response.json()) @@ -805,7 +836,7 @@ async def get(self, scope: str, role_assignment_name: str, **kwargs: Any) -> _mo @distributed_trace def list_for_scope( self, scope: str, *, filter: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.RoleAssignment"]: + ) -> AsyncItemPaged["_models.RoleAssignment"]: """Gets role assignments for a scope. :param scope: The scope of the role assignments. Required. @@ -873,7 +904,10 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.RoleAssignment], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.RoleAssignment], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -889,7 +923,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) return pipeline_response @@ -897,7 +934,7 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) -class KeyVaultClientOperationsMixin( +class _KeyVaultClientOperationsMixin( ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], KeyVaultClientConfiguration] ): @@ -937,6 +974,7 @@ async def full_backup_status(self, job_id: str, **kwargs: Any) -> _models.FullBa } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: 
ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -951,11 +989,14 @@ async def full_backup_status(self, job_id: str, **kwargs: Any) -> _models.FullBa except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.FullBackupOperation, response.json()) @@ -1002,6 +1043,7 @@ async def _full_backup_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1015,7 +1057,10 @@ async def _full_backup_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} @@ -1024,7 +1069,7 @@ async def _full_backup_initial( ) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1174,8 +1219,9 @@ def get_long_running_output(pipeline_response): ) @api_version_validation( - method_added_on="7.6", - 
params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6", "2025-06-01-preview", "2025-07-01"], ) async def _pre_full_backup_initial( self, @@ -1217,6 +1263,7 @@ async def _pre_full_backup_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1230,7 +1277,10 @@ async def _pre_full_backup_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} @@ -1239,7 +1289,7 @@ async def _pre_full_backup_initial( ) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1310,8 +1360,9 @@ async def begin_pre_full_backup( @distributed_trace_async @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6", "2025-06-01-preview", "2025-07-01"], ) async def begin_pre_full_backup( self, @@ -1427,6 +1478,7 @@ async def restore_status(self, job_id: str, **kwargs: Any) -> _models.RestoreOpe } 
_request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1441,11 +1493,14 @@ async def restore_status(self, job_id: str, **kwargs: Any) -> _models.RestoreOpe except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.RestoreOperation, response.json()) @@ -1492,6 +1547,7 @@ async def _full_restore_operation_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1505,7 +1561,10 @@ async def _full_restore_operation_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} @@ -1514,7 +1573,7 @@ async def _full_restore_operation_initial( ) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = 
response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1665,8 +1724,9 @@ def get_long_running_output(pipeline_response): ) @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6", "2025-06-01-preview", "2025-07-01"], ) async def _pre_full_restore_operation_initial( self, @@ -1708,6 +1768,7 @@ async def _pre_full_restore_operation_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1721,7 +1782,10 @@ async def _pre_full_restore_operation_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} @@ -1730,7 +1794,7 @@ async def _pre_full_restore_operation_initial( ) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1801,8 +1865,9 @@ async def begin_pre_full_restore_operation( @distributed_trace_async @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + 
method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6", "2025-06-01-preview", "2025-07-01"], ) async def begin_pre_full_restore_operation( self, @@ -1919,6 +1984,7 @@ async def selective_key_restore_status(self, job_id: str, **kwargs: Any) -> _mod } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1933,11 +1999,14 @@ async def selective_key_restore_status(self, job_id: str, **kwargs: Any) -> _mod except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.SelectiveKeyRestoreOperation, response.json()) @@ -1988,6 +2057,7 @@ async def _selective_key_restore_operation_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2001,7 +2071,10 @@ async def _selective_key_restore_operation_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + 
error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} @@ -2010,7 +2083,7 @@ async def _selective_key_restore_operation_initial( ) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2300,6 +2373,7 @@ async def update_setting( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2314,11 +2388,14 @@ async def update_setting( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Setting, response.json()) @@ -2366,6 +2443,7 @@ async def get_setting(self, setting_name: str, **kwargs: Any) -> _models.Setting } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2380,11 +2458,14 @@ async def get_setting(self, setting_name: str, **kwargs: Any) -> 
_models.Setting except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Setting, response.json()) @@ -2428,6 +2509,7 @@ async def get_settings(self, **kwargs: Any) -> _models.SettingsListResult: } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2442,11 +2524,14 @@ async def get_settings(self, **kwargs: Any) -> _models.SettingsListResult: except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.SettingsListResult, response.json()) diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/models/_enums.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/models/_enums.py index 4bb8bdeff7b5..5057cd87aa3a 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/models/_enums.py +++ 
b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/models/_enums.py @@ -107,9 +107,9 @@ class RoleScope(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The role scope.""" GLOBAL = "/" - """Global scope""" + """Global scope.""" KEYS = "/keys" - """Keys scope""" + """Keys scope.""" class RoleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/models/_models.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/models/_models.py index bc474e792b41..d2e0d1bed8bf 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/models/_models.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/models/_models.py @@ -8,7 +8,7 @@ # pylint: disable=useless-super-delegation import datetime -from typing import Any, List, Mapping, Optional, TYPE_CHECKING, Union, overload +from typing import Any, Mapping, Optional, TYPE_CHECKING, Union, overload from .._utils.model_base import Model as _Model, rest_field @@ -135,18 +135,18 @@ class Permission(_Model): ~azure.keyvault.administration._generated.models.DataAction] """ - actions: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + actions: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Action permissions that are granted.""" - not_actions: Optional[List[str]] = rest_field( + not_actions: Optional[list[str]] = rest_field( name="notActions", visibility=["read", "create", "update", "delete", "query"] ) """Action permissions that are excluded but not denied. 
They may be granted by other role definitions assigned to a principal.""" - data_actions: Optional[List[Union[str, "_models.DataAction"]]] = rest_field( + data_actions: Optional[list[Union[str, "_models.DataAction"]]] = rest_field( name="dataActions", visibility=["read", "create", "update", "delete", "query"] ) """Data action permissions that are granted.""" - not_data_actions: Optional[List[Union[str, "_models.DataAction"]]] = rest_field( + not_data_actions: Optional[list[Union[str, "_models.DataAction"]]] = rest_field( name="notDataActions", visibility=["read", "create", "update", "delete", "query"] ) """Data action permissions that are excluded but not denied. They may be granted by other role @@ -156,10 +156,10 @@ class Permission(_Model): def __init__( self, *, - actions: Optional[List[str]] = None, - not_actions: Optional[List[str]] = None, - data_actions: Optional[List[Union[str, "_models.DataAction"]]] = None, - not_data_actions: Optional[List[Union[str, "_models.DataAction"]]] = None, + actions: Optional[list[str]] = None, + not_actions: Optional[list[str]] = None, + data_actions: Optional[list[Union[str, "_models.DataAction"]]] = None, + not_data_actions: Optional[list[Union[str, "_models.DataAction"]]] = None, ) -> None: ... @overload @@ -633,11 +633,11 @@ class RoleDefinitionProperties(_Model): name="type", visibility=["read", "create", "update", "delete", "query"] ) """The role type. 
Known values are: \"AKVBuiltInRole\" and \"CustomRole\".""" - permissions: Optional[List["_models.Permission"]] = rest_field( + permissions: Optional[list["_models.Permission"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) """Role definition permissions.""" - assignable_scopes: Optional[List[Union[str, "_models.RoleScope"]]] = rest_field( + assignable_scopes: Optional[list[Union[str, "_models.RoleScope"]]] = rest_field( name="assignableScopes", visibility=["read", "create", "update", "delete", "query"] ) """Role definition assignable scopes.""" @@ -649,8 +649,8 @@ def __init__( role_name: Optional[str] = None, description: Optional[str] = None, role_type: Optional[Union[str, "_models.RoleType"]] = None, - permissions: Optional[List["_models.Permission"]] = None, - assignable_scopes: Optional[List[Union[str, "_models.RoleScope"]]] = None, + permissions: Optional[list["_models.Permission"]] = None, + assignable_scopes: Optional[list[Union[str, "_models.RoleScope"]]] = None, ) -> None: ... 
@overload @@ -861,7 +861,7 @@ class SettingsListResult(_Model): :vartype settings: list[~azure.keyvault.administration._generated.models.Setting] """ - settings: Optional[List["_models.Setting"]] = rest_field(visibility=["read"]) + settings: Optional[list["_models.Setting"]] = rest_field(visibility=["read"]) """A response message containing a list of account settings with their associated value.""" diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/__init__.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/__init__.py index 2318933b2c83..96a14545b22a 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/__init__.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/__init__.py @@ -14,7 +14,7 @@ from ._operations import RoleDefinitionsOperations # type: ignore from ._operations import RoleAssignmentsOperations # type: ignore -from ._operations import KeyVaultClientOperationsMixin # type: ignore +from ._operations import _KeyVaultClientOperationsMixin # type: ignore # pylint: disable=unused-import from ._patch import __all__ as _patch_all from ._patch import * @@ -23,7 +23,6 @@ __all__ = [ "RoleDefinitionsOperations", "RoleAssignmentsOperations", - "KeyVaultClientOperationsMixin", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/_operations.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/_operations.py index 8c5d159ae978..32511186d843 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/_operations.py +++ 
b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/_operations.py @@ -9,7 +9,7 @@ from collections.abc import MutableMapping from io import IOBase import json -from typing import Any, Callable, Dict, IO, Iterable, Iterator, List, Optional, TypeVar, Union, cast, overload +from typing import Any, Callable, IO, Iterator, Optional, TypeVar, Union, cast, overload import urllib.parse from azure.core import PipelineClient @@ -40,7 +40,8 @@ JSON = MutableMapping[str, Any] T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -50,7 +51,7 @@ def build_role_definitions_delete_request(scope: str, role_definition_name: str, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -78,7 +79,7 @@ def build_role_definitions_create_or_update_request( # pylint: disable=name-too _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -105,7 +106,7 @@ def build_role_definitions_get_request(scope: str, role_definition_name: str, ** _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", 
{}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -130,7 +131,7 @@ def build_role_definitions_list_request(scope: str, *, filter: Optional[str] = N _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -156,7 +157,7 @@ def build_role_assignments_delete_request(scope: str, role_assignment_name: str, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -182,7 +183,7 @@ def build_role_assignments_create_request(scope: str, role_assignment_name: str, _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -209,7 +210,7 @@ def build_role_assignments_get_request(scope: str, role_assignment_name: str, ** _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) 
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -236,7 +237,7 @@ def build_role_assignments_list_for_scope_request( # pylint: disable=name-too-l _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -264,7 +265,7 @@ def build_key_vault_full_backup_status_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -289,7 +290,7 @@ def build_key_vault_full_backup_request(**kwargs: Any) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -311,7 +312,7 @@ def build_key_vault_pre_full_backup_request(**kwargs: Any) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -332,7 +333,7 @@ def build_key_vault_restore_status_request(job_id: str, **kwargs: Any) -> HttpRe _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -357,7 +358,7 @@ def build_key_vault_full_restore_operation_request(**kwargs: Any) -> HttpRequest _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -379,7 +380,7 @@ def build_key_vault_pre_full_restore_operation_request(**kwargs: Any) -> HttpReq _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -402,7 +403,7 @@ def build_key_vault_selective_key_restore_status_request( # pylint: disable=nam _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") 
# Construct URL @@ -429,7 +430,7 @@ def build_key_vault_selective_key_restore_operation_request( # pylint: disable= _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -456,7 +457,7 @@ def build_key_vault_update_setting_request(setting_name: str, **kwargs: Any) -> _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -482,7 +483,7 @@ def build_key_vault_get_setting_request(setting_name: str, **kwargs: Any) -> Htt _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -506,7 +507,7 @@ def build_key_vault_get_settings_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.6")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -578,6 +579,7 @@ def delete(self, scope: str, 
role_definition_name: str, **kwargs: Any) -> _model } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -592,11 +594,14 @@ def delete(self, scope: str, role_definition_name: str, **kwargs: Any) -> _model except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.RoleDefinition, response.json()) @@ -752,6 +757,7 @@ def create_or_update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -766,11 +772,14 @@ def create_or_update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.RoleDefinition, response.json()) @@ -818,6 +827,7 @@ def get(self, scope: str, 
role_definition_name: str, **kwargs: Any) -> _models.R } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -832,11 +842,14 @@ def get(self, scope: str, role_definition_name: str, **kwargs: Any) -> _models.R except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.RoleDefinition, response.json()) @@ -846,7 +859,7 @@ def get(self, scope: str, role_definition_name: str, **kwargs: Any) -> _models.R return deserialized # type: ignore @distributed_trace - def list(self, scope: str, *, filter: Optional[str] = None, **kwargs: Any) -> Iterable["_models.RoleDefinition"]: + def list(self, scope: str, *, filter: Optional[str] = None, **kwargs: Any) -> ItemPaged["_models.RoleDefinition"]: """Get all role definitions that are applicable at scope and above. :param scope: The scope of the role definition. Required. 
@@ -913,7 +926,10 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.RoleDefinition], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.RoleDefinition], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -929,7 +945,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) return pipeline_response @@ -993,6 +1012,7 @@ def delete(self, scope: str, role_assignment_name: str, **kwargs: Any) -> _model } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1007,11 +1027,14 @@ def delete(self, scope: str, role_assignment_name: str, **kwargs: Any) -> _model except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.RoleAssignment, response.json()) @@ -1163,6 +1186,7 @@ def create( } _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1177,11 +1201,14 @@ def create( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.RoleAssignment, response.json()) @@ -1229,6 +1256,7 @@ def get(self, scope: str, role_assignment_name: str, **kwargs: Any) -> _models.R } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1243,11 +1271,14 @@ def get(self, scope: str, role_assignment_name: str, **kwargs: Any) -> _models.R except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.RoleAssignment, response.json()) @@ -1259,7 +1290,7 @@ def get(self, scope: str, role_assignment_name: str, 
**kwargs: Any) -> _models.R @distributed_trace def list_for_scope( self, scope: str, *, filter: Optional[str] = None, **kwargs: Any - ) -> Iterable["_models.RoleAssignment"]: + ) -> ItemPaged["_models.RoleAssignment"]: """Gets role assignments for a scope. :param scope: The scope of the role assignments. Required. @@ -1327,7 +1358,10 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.RoleAssignment], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.RoleAssignment], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -1343,7 +1377,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) return pipeline_response @@ -1351,7 +1388,7 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) -class KeyVaultClientOperationsMixin( +class _KeyVaultClientOperationsMixin( ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], KeyVaultClientConfiguration] ): @@ -1391,6 +1428,7 @@ def full_backup_status(self, job_id: str, **kwargs: Any) -> _models.FullBackupOp } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1405,11 +1443,14 @@ def full_backup_status(self, job_id: str, **kwargs: Any) -> _models.FullBackupOp except (StreamConsumedError, StreamClosedError): 
pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.FullBackupOperation, response.json()) @@ -1456,6 +1497,7 @@ def _full_backup_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1469,7 +1511,10 @@ def _full_backup_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} @@ -1478,7 +1523,7 @@ def _full_backup_initial( ) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1627,8 +1672,9 @@ def get_long_running_output(pipeline_response): ) @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6", "2025-06-01-preview", "2025-07-01"], ) def 
_pre_full_backup_initial( self, @@ -1670,6 +1716,7 @@ def _pre_full_backup_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1683,7 +1730,10 @@ def _pre_full_backup_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} @@ -1692,7 +1742,7 @@ def _pre_full_backup_initial( ) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1763,8 +1813,9 @@ def begin_pre_full_backup( @distributed_trace @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6", "2025-06-01-preview", "2025-07-01"], ) def begin_pre_full_backup( self, @@ -1879,6 +1930,7 @@ def restore_status(self, job_id: str, **kwargs: Any) -> _models.RestoreOperation } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1893,11 +1945,14 @@ def 
restore_status(self, job_id: str, **kwargs: Any) -> _models.RestoreOperation except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.RestoreOperation, response.json()) @@ -1944,6 +1999,7 @@ def _full_restore_operation_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1957,7 +2013,10 @@ def _full_restore_operation_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} @@ -1966,7 +2025,7 @@ def _full_restore_operation_initial( ) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2116,8 +2175,9 @@ def get_long_running_output(pipeline_response): ) @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + 
params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6", "2025-06-01-preview", "2025-07-01"], ) def _pre_full_restore_operation_initial( self, @@ -2159,6 +2219,7 @@ def _pre_full_restore_operation_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2172,7 +2233,10 @@ def _pre_full_restore_operation_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} @@ -2181,7 +2245,7 @@ def _pre_full_restore_operation_initial( ) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2252,8 +2316,9 @@ def begin_pre_full_restore_operation( @distributed_trace @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6", "2025-06-01-preview", "2025-07-01"], ) def begin_pre_full_restore_operation( self, @@ -2369,6 +2434,7 @@ def selective_key_restore_status(self, job_id: str, **kwargs: Any) -> _models.Se } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = 
kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2383,11 +2449,14 @@ def selective_key_restore_status(self, job_id: str, **kwargs: Any) -> _models.Se except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.SelectiveKeyRestoreOperation, response.json()) @@ -2438,6 +2507,7 @@ def _selective_key_restore_operation_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2451,7 +2521,10 @@ def _selective_key_restore_operation_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} @@ -2460,7 +2533,7 @@ def _selective_key_restore_operation_initial( ) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) 
# type: ignore @@ -2749,6 +2822,7 @@ def update_setting( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2763,11 +2837,14 @@ def update_setting( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Setting, response.json()) @@ -2815,6 +2892,7 @@ def get_setting(self, setting_name: str, **kwargs: Any) -> _models.Setting: } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2829,11 +2907,14 @@ def get_setting(self, setting_name: str, **kwargs: Any) -> _models.Setting: except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Setting, response.json()) @@ -2877,6 +2958,7 @@ 
def get_settings(self, **kwargs: Any) -> _models.SettingsListResult: } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2891,11 +2973,14 @@ def get_settings(self, **kwargs: Any) -> _models.SettingsListResult: except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + error = _failsafe_deserialize( + _models.KeyVaultError, + response, + ) raise HttpResponseError(response=response, model=error) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.SettingsListResult, response.json()) diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_challenge_auth_policy.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_challenge_auth_policy.py index dad851f8f58c..3e3ac1855178 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_challenge_auth_policy.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_challenge_auth_policy.py @@ -82,9 +82,7 @@ def __init__(self, credential: AsyncTokenProvider, *scopes: str, **kwargs: Any) self._verify_challenge_resource = kwargs.pop("verify_challenge_resource", True) self._request_copy: Optional[HttpRequest] = None - async def send( - self, request: PipelineRequest[HttpRequest] - ) -> PipelineResponse[HttpRequest, AsyncHttpResponse]: + async def send(self, request: PipelineRequest[HttpRequest]) -> PipelineResponse[HttpRequest, AsyncHttpResponse]: 
"""Authorize request with a bearer token and send it to the next policy. We implement this method to account for the valid scenario where a Key Vault authentication challenge is @@ -155,7 +153,6 @@ async def handle_challenge_flow( await await_result(self.on_response, request, response) return response - async def on_request(self, request: PipelineRequest) -> None: _enforce_tls(request) challenge = ChallengeCache.get_challenge_for_url(request.http_request.url) @@ -184,7 +181,6 @@ async def on_request(self, request: PipelineRequest) -> None: bodiless_request.headers["Content-Length"] = "0" request.http_request = bodiless_request - async def on_challenge(self, request: PipelineRequest, response: PipelineResponse) -> bool: try: # CAE challenges may not include a scope or tenant; cache from the previous challenge to use if necessary @@ -227,9 +223,7 @@ async def on_challenge(self, request: PipelineRequest, response: PipelineRespons if challenge.tenant_id and challenge.tenant_id.lower().endswith("adfs"): await self.authorize_request(request, scope, claims=challenge.claims) else: - await self.authorize_request( - request, scope, claims=challenge.claims, tenant_id=challenge.tenant_id - ) + await self.authorize_request(request, scope, claims=challenge.claims, tenant_id=challenge.tenant_id) return True diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_client_base.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_client_base.py index 367a99fae45b..f47722f72f97 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_client_base.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_client_base.py @@ -44,11 +44,7 @@ def __init__(self, vault_url: str, credential: AsyncTokenCredential, **kwargs: A http_logging_policy = HttpLoggingPolicy(**kwargs) http_logging_policy.allowed_header_names.update( - { 
- "x-ms-keyvault-network-info", - "x-ms-keyvault-region", - "x-ms-keyvault-service-version" - } + {"x-ms-keyvault-network-info", "x-ms-keyvault-region", "x-ms-keyvault-service-version"} ) verify_challenge = kwargs.pop("verify_challenge_resource", True) @@ -59,7 +55,7 @@ def __init__(self, vault_url: str, credential: AsyncTokenCredential, **kwargs: A authentication_policy=AsyncChallengeAuthPolicy(credential, verify_challenge_resource=verify_challenge), sdk_moniker=SDK_MONIKER, http_logging_policy=http_logging_policy, - **kwargs + **kwargs, ) self._models = _models except ValueError as exc: diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/client_base.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/client_base.py index d0efb46a1ebd..77401fd15ba6 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/client_base.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/client_base.py @@ -103,7 +103,7 @@ def __init__(self, vault_url: str, credential: TokenCredential, **kwargs: Any) - authentication_policy=ChallengeAuthPolicy(credential, verify_challenge_resource=verify_challenge), sdk_moniker=SDK_MONIKER, http_logging_policy=http_logging_policy, - **kwargs + **kwargs, ) self._models = _models except ValueError as exc: diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/http_challenge.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/http_challenge.py index 0320df5a868b..8b14b999de78 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/http_challenge.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/http_challenge.py @@ -148,7 +148,9 @@ def supports_message_protection(self) -> bool: """ return self.supports_pop() and self.server_encryption_key and 
self.server_signature_key # type: ignore - def _validate_challenge(self, challenge: str) -> str: # pylint:disable=bad-option-value,useless-option-value,no-self-use + def _validate_challenge( + self, challenge: str + ) -> str: # pylint:disable=bad-option-value,useless-option-value,no-self-use """Verifies that the challenge is a valid auth challenge and returns the key=value pairs. :param str challenge: The WWW-Authenticate header of the challenge response. @@ -161,7 +163,9 @@ def _validate_challenge(self, challenge: str) -> str: # pylint:disable=bad-opti return challenge.strip() - def _validate_request_uri(self, uri: str) -> str: # pylint:disable=bad-option-value,useless-option-value,no-self-use + def _validate_request_uri( + self, uri: str + ) -> str: # pylint:disable=bad-option-value,useless-option-value,no-self-use """Extracts the host authority from the given URI. :param str uri: The URI of the HTTP request that prompted the challenge. diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/polling.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/polling.py index 955c6a9ea6a6..1493f00e2792 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/polling.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/polling.py @@ -36,7 +36,7 @@ class KeyVaultBackupClientPollingMethod(LROBasePolling): def get_continuation_token(self) -> str: """ Get a continuation token to resume the polling later. - + :return: A continuation token. 
:rtype: str """ diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_models.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_models.py index b944ed0c3e91..d92c517b7510 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_models.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_models.py @@ -70,9 +70,11 @@ def _from_generated(cls, role_assignment: RoleAssignment) -> "KeyVaultRoleAssign role_assignment_id=role_assignment.id, name=role_assignment.name, assignment_type=role_assignment.type, - properties=KeyVaultRoleAssignmentProperties._from_generated(role_assignment.properties) - if role_assignment.properties - else KeyVaultRoleAssignmentProperties(), + properties=( + KeyVaultRoleAssignmentProperties._from_generated(role_assignment.properties) + if role_assignment.properties + else KeyVaultRoleAssignmentProperties() + ), ) @@ -144,9 +146,11 @@ def _from_generated(cls, definition: RoleDefinition) -> "KeyVaultRoleDefinition" description=definition.properties.description if definition.properties else None, id=definition.id, name=definition.name, - permissions=[KeyVaultPermission._from_generated(p) for p in definition.properties.permissions or []] - if definition.properties - else None, + permissions=( + [KeyVaultPermission._from_generated(p) for p in definition.properties.permissions or []] + if definition.properties + else None + ), role_name=definition.properties.role_name if definition.properties else None, role_type=definition.properties.role_type if definition.properties else None, type=definition.type, diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_settings_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_settings_client.py index ab4b5ffdbbff..67dad85e648e 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_settings_client.py 
+++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_settings_client.py @@ -27,6 +27,7 @@ class KeyVaultSettingsClient(KeyVaultClientBase): :keyword bool verify_challenge_resource: Whether to verify the authentication challenge resource matches the Key Vault or Managed HSM domain. Defaults to True. """ + # pylint:disable=protected-access @distributed_trace @@ -75,11 +76,7 @@ def update_setting(self, setting: KeyVaultSetting, **kwargs: Any) -> KeyVaultSet :raises ~azure.core.exceptions.HttpResponseError: """ parameters = UpdateSettingRequest(value=setting.value) - result = self._client.update_setting( - setting_name=setting.name, - parameters=parameters, - **kwargs - ) + result = self._client.update_setting(setting_name=setting.name, parameters=parameters, **kwargs) return KeyVaultSetting._from_generated(result) def __enter__(self) -> "KeyVaultSettingsClient": diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_access_control_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_access_control_client.py index b6ce9fe5cae8..56fc0bb2c3f3 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_access_control_client.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_access_control_client.py @@ -68,15 +68,10 @@ async def create_role_assignment( assignment_name = name or uuid4() create_parameters = RoleAssignmentCreateParameters( - properties=RoleAssignmentProperties( - principal_id=principal_id, role_definition_id=str(definition_id) - ) + properties=RoleAssignmentProperties(principal_id=principal_id, role_definition_id=str(definition_id)) ) assignment = await self._client.role_assignments.create( - scope=scope, - role_assignment_name=str(assignment_name), - parameters=create_parameters, - **kwargs + scope=scope, role_assignment_name=str(assignment_name), parameters=create_parameters, **kwargs ) return 
KeyVaultRoleAssignment._from_generated(assignment) @@ -96,9 +91,7 @@ async def delete_role_assignment( :rtype: None """ try: - await self._client.role_assignments.delete( - scope=scope, role_assignment_name=str(name), **kwargs - ) + await self._client.role_assignments.delete(scope=scope, role_assignment_name=str(name), **kwargs) except ResourceNotFoundError: pass @@ -117,9 +110,7 @@ async def get_role_assignment( :returns: The fetched role assignment. :rtype: ~azure.keyvault.administration.KeyVaultRoleAssignment """ - assignment = await self._client.role_assignments.get( - scope=scope, role_assignment_name=str(name), **kwargs - ) + assignment = await self._client.role_assignments.get(scope=scope, role_assignment_name=str(name), **kwargs) return KeyVaultRoleAssignment._from_generated(assignment) @distributed_trace @@ -136,9 +127,7 @@ def list_role_assignments( :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.administration.KeyVaultRoleAssignment] """ return self._client.role_assignments.list_for_scope( - scope=scope, - cls=lambda result: [KeyVaultRoleAssignment._from_generated(a) for a in result], - **kwargs + scope=scope, cls=lambda result: [KeyVaultRoleAssignment._from_generated(a) for a in result], **kwargs ) @distributed_trace_async @@ -199,10 +188,7 @@ async def set_role_definition( parameters = RoleDefinitionCreateParameters(properties=properties) definition = await self._client.role_definitions.create_or_update( - scope=scope, - role_definition_name=str(name or uuid4()), - parameters=parameters, - **kwargs + scope=scope, role_definition_name=str(name or uuid4()), parameters=parameters, **kwargs ) return KeyVaultRoleDefinition._from_generated(definition) @@ -221,9 +207,7 @@ async def get_role_definition( :returns: The fetched role definition. 
:rtype: ~azure.keyvault.administration.KeyVaultRoleDefinition """ - definition = await self._client.role_definitions.get( - scope=scope, role_definition_name=str(name), **kwargs - ) + definition = await self._client.role_definitions.get(scope=scope, role_definition_name=str(name), **kwargs) return KeyVaultRoleDefinition._from_generated(definition) @distributed_trace_async @@ -242,9 +226,7 @@ async def delete_role_definition( :rtype: None """ try: - await self._client.role_definitions.delete( - scope=scope, role_definition_name=str(name), **kwargs - ) + await self._client.role_definitions.delete(scope=scope, role_definition_name=str(name), **kwargs) except ResourceNotFoundError: pass @@ -262,9 +244,7 @@ def list_role_definitions( :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.administration.KeyVaultRoleDefinition] """ return self._client.role_definitions.list( - scope=scope, - cls=lambda result: [KeyVaultRoleDefinition._from_generated(d) for d in result], - **kwargs + scope=scope, cls=lambda result: [KeyVaultRoleDefinition._from_generated(d) for d in result], **kwargs ) async def __aenter__(self) -> "KeyVaultAccessControlClient": diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_backup_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_backup_client.py index f3dc496572bb..64fe36525472 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_backup_client.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_backup_client.py @@ -45,9 +45,7 @@ async def _use_continuation_token(self, continuation_token: str, status_method: + "poller's continuation_token() method" ) from ex - pipeline_response = await status_method( - job_id=job_id, cls=lambda pipeline_response, _, __: pipeline_response - ) + pipeline_response = await status_method(job_id=job_id, cls=lambda pipeline_response, _, __: pipeline_response) if 
"azure-asyncoperation" not in pipeline_response.http_response.headers: pipeline_response.http_response.headers["azure-asyncoperation"] = status_url return _get_continuation_token(pipeline_response) @@ -60,8 +58,7 @@ async def begin_backup( use_managed_identity: Literal[True], continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[KeyVaultBackupResult]: - ... + ) -> AsyncLROPoller[KeyVaultBackupResult]: ... @overload async def begin_backup( @@ -71,8 +68,7 @@ async def begin_backup( sas_token: str, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[KeyVaultBackupResult]: - ... + ) -> AsyncLROPoller[KeyVaultBackupResult]: ... # Disabling pylint checks because they don't correctly handle overloads @distributed_trace_async @@ -138,8 +134,7 @@ async def begin_restore( key_name: Optional[str] = None, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[None]: - ... + ) -> AsyncLROPoller[None]: ... @overload async def begin_restore( @@ -150,8 +145,7 @@ async def begin_restore( key_name: Optional[str] = None, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[None]: - ... + ) -> AsyncLROPoller[None]: ... # Disabling pylint checks because they don't correctly handle overloads @distributed_trace_async @@ -244,8 +238,7 @@ async def begin_pre_backup( use_managed_identity: Literal[True], continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[None]: - ... + ) -> AsyncLROPoller[None]: ... @overload async def begin_pre_backup( @@ -255,8 +248,7 @@ async def begin_pre_backup( sas_token: str, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[None]: - ... + ) -> AsyncLROPoller[None]: ... 
@distributed_trace_async async def begin_pre_backup( # pylint: disable=docstring-keyword-should-match-keyword-only @@ -312,8 +304,7 @@ async def begin_pre_restore( use_managed_identity: Literal[True], continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[None]: - ... + ) -> AsyncLROPoller[None]: ... @overload async def begin_pre_restore( @@ -323,8 +314,7 @@ async def begin_pre_restore( sas_token: str, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[None]: - ... + ) -> AsyncLROPoller[None]: ... @distributed_trace_async async def begin_pre_restore( # pylint: disable=docstring-keyword-should-match-keyword-only diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_settings_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_settings_client.py index d1a96bb0bc66..3a6eba9acd30 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_settings_client.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_settings_client.py @@ -28,6 +28,7 @@ class KeyVaultSettingsClient(AsyncKeyVaultClientBase): :keyword bool verify_challenge_resource: Whether to verify the authentication challenge resource matches the Key Vault or Managed HSM domain. Defaults to True. 
""" + # pylint:disable=protected-access @distributed_trace_async @@ -78,11 +79,7 @@ async def update_setting(self, setting: KeyVaultSetting, **kwargs: Any) -> KeyVa :raises ~azure.core.exceptions.HttpResponseError: """ parameters = UpdateSettingRequest(value=setting.value) - result = await self._client.update_setting( - setting_name=setting.name, - parameters=parameters, - **kwargs - ) + result = await self._client.update_setting(setting_name=setting.name, parameters=parameters, **kwargs) return KeyVaultSetting._from_generated(result) async def __aenter__(self) -> "KeyVaultSettingsClient": diff --git a/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations.py b/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations.py index 4a88c14046d9..dec69877910c 100644 --- a/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations.py +++ b/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -12,7 +13,7 @@ # # 3. Set environment variable MANAGED_HSM_URL with the URL of your managed HSM and AZURE_CLIENT_ID with the ID of a # service principal -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. 
For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # @@ -79,8 +80,7 @@ # [START update_a_role_definition] new_permissions = [ KeyVaultPermission( - data_actions=[KeyVaultDataAction.READ_HSM_KEY], - not_data_actions=[KeyVaultDataAction.CREATE_HSM_KEY] + data_actions=[KeyVaultDataAction.READ_HSM_KEY], not_data_actions=[KeyVaultDataAction.CREATE_HSM_KEY] ) ] unique_definition_name = role_definition.name diff --git a/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations_async.py b/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations_async.py index 4238ea22245d..f11d469b8ebf 100644 --- a/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations_async.py +++ b/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -16,7 +17,7 @@ # 2. azure-keyvault-administration and azure-identity libraries (pip install these) # # 3. Set environment variable MANAGED_HSM_URL with the URL of your managed HSM -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # @@ -34,6 +35,7 @@ # 5. Delete a role definition (delete_role_definition) # ---------------------------------------------------------------------------------------------------------- + async def run_sample(): MANAGED_HSM_URL = os.environ["MANAGED_HSM_URL"] @@ -41,7 +43,7 @@ async def run_sample(): # Here we use the DefaultAzureCredential, but any azure-identity credential can be used. 
credential = DefaultAzureCredential() client = KeyVaultAccessControlClient(vault_url=MANAGED_HSM_URL, credential=credential) - + # Let's first create a custom role definition. This role permits creating keys in a Managed HSM. # We'll provide a friendly role name, and let a unique role definition name (a GUID) be generated for us. print("\n.. Create a role definition") @@ -57,8 +59,7 @@ async def run_sample(): print("\n.. Update a role definition") new_permissions = [ KeyVaultPermission( - data_actions=[KeyVaultDataAction.READ_HSM_KEY], - not_data_actions=[KeyVaultDataAction.CREATE_HSM_KEY] + data_actions=[KeyVaultDataAction.READ_HSM_KEY], not_data_actions=[KeyVaultDataAction.CREATE_HSM_KEY] ) ] unique_definition_name = role_definition.name diff --git a/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations.py b/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations.py index a36305134280..582b250e5f5f 100644 --- a/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations.py +++ b/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations.py @@ -17,7 +17,7 @@ # 4. A user-assigned managed identity that has access to your managed HSM. For more information about how to create a # user-assigned managed identity, refer to # https://learn.microsoft.com/entra/identity/managed-identities-azure-resources/overview -# +# # 5. 
A storage account, that your managed identity has access to, containing a blob storage container # (See https://learn.microsoft.com/azure/storage/blobs/storage-blobs-introduction) # diff --git a/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations_async.py b/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations_async.py index 1cba4d1b11ae..5d5318769300 100644 --- a/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations_async.py +++ b/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations_async.py @@ -19,7 +19,7 @@ # 4. A user-assigned managed identity that has access to your managed HSM. For more information about how to create a # user-assigned managed identity, refer to # https://learn.microsoft.com/entra/identity/managed-identities-azure-resources/overview -# +# # 5. A storage account, that your managed identity has access to, containing a blob storage container # (See https://learn.microsoft.com/azure/storage/blobs/storage-blobs-introduction) # @@ -36,6 +36,7 @@ # 2. Perform a full restore (begin_restore) # ---------------------------------------------------------------------------------------------------------- + async def run_sample(): MANAGED_HSM_URL = os.environ["MANAGED_HSM_URL"] CONTAINER_URL = os.environ["CONTAINER_URL"] @@ -45,7 +46,7 @@ async def run_sample(): # Here we use the DefaultAzureCredential, but any azure-identity credential can be used. credential = ManagedIdentityCredential(client_id=MANAGED_IDENTITY_CLIENT_ID) client = KeyVaultBackupClient(vault_url=MANAGED_HSM_URL, credential=credential) - + # Let's back up the vault with begin_backup, which returns a poller. Calling result() on the poller will return # a KeyVaultBackupResult that contains the URL of the backup after the operation completes. Calling wait() on # the poller will wait until the operation is complete. 
diff --git a/sdk/keyvault/azure-keyvault-administration/samples/settings_operations.py b/sdk/keyvault/azure-keyvault-administration/samples/settings_operations.py index 390e0d7b4e00..68461b4a3e3d 100644 --- a/sdk/keyvault/azure-keyvault-administration/samples/settings_operations.py +++ b/sdk/keyvault/azure-keyvault-administration/samples/settings_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -13,7 +14,7 @@ # 2. azure-keyvault-administration and azure-identity libraries (pip install these) # # 3. Set environment variable MANAGED_HSM_URL with the URL of your managed HSM -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # diff --git a/sdk/keyvault/azure-keyvault-administration/samples/settings_operations_async.py b/sdk/keyvault/azure-keyvault-administration/samples/settings_operations_async.py index 77a7070a7f44..a400f4889d12 100644 --- a/sdk/keyvault/azure-keyvault-administration/samples/settings_operations_async.py +++ b/sdk/keyvault/azure-keyvault-administration/samples/settings_operations_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -16,7 +17,7 @@ # 2. azure-keyvault-administration and azure-identity libraries (pip install these) # # 3. Set environment variable MANAGED_HSM_URL with the URL of your managed HSM -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # @@ -28,6 +29,7 @@ # 2. 
Update a setting (update_setting) # ---------------------------------------------------------------------------------------------------------- + async def run_sample(): MANAGED_HSM_URL = os.environ["MANAGED_HSM_URL"] diff --git a/sdk/keyvault/azure-keyvault-administration/tests/_async_test_case.py b/sdk/keyvault/azure-keyvault-administration/tests/_async_test_case.py index c7aa19e75988..ab9fac0ebe9d 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/_async_test_case.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/_async_test_case.py @@ -60,6 +60,7 @@ async def _preparer(test_class, api_version, **kwargs): async with client: await fn(test_class, client, **kwargs) + return _preparer def create_backup_client(self, **kwargs): @@ -82,6 +83,7 @@ async def _preparer(test_class, api_version, **kwargs): async with client: await fn(test_class, client, **kwargs) + return _preparer def create_backup_client(self, **kwargs): @@ -101,11 +103,11 @@ async def _preparer(test_class, api_version, **kwargs): async with client: await fn(test_class, client, **kwargs) + return _preparer def create_access_control_client(self, **kwargs): - from azure.keyvault.administration.aio import \ - KeyVaultAccessControlClient + from azure.keyvault.administration.aio import KeyVaultAccessControlClient credential = self.get_credential(KeyVaultAccessControlClient, is_async=True) return self.create_client_from_credential( @@ -121,11 +123,11 @@ async def _preparer(test_class, api_version, **kwargs): async with client: await fn(test_class, client, **kwargs) + return _preparer def create_access_control_client(self, **kwargs): - from azure.keyvault.administration.aio import \ - KeyVaultSettingsClient + from azure.keyvault.administration.aio import KeyVaultSettingsClient credential = self.get_credential(KeyVaultSettingsClient, is_async=True) return self.create_client_from_credential( diff --git a/sdk/keyvault/azure-keyvault-administration/tests/_test_case.py 
b/sdk/keyvault/azure-keyvault-administration/tests/_test_case.py index cb9d2edc39e7..fbeca4fd8ec7 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/_test_case.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/_test_case.py @@ -54,7 +54,7 @@ def _set_mgmt_settings_real_values(self): class KeyVaultBackupClientPreparer(BaseClientPreparer): def __init__(self, **kwargs) -> None: - super().__init__(**kwargs) + super().__init__(**kwargs) def __call__(self, fn): def _preparer(test_class, api_version, **kwargs): @@ -65,6 +65,7 @@ def _preparer(test_class, api_version, **kwargs): with client: fn(test_class, client, **kwargs) + return _preparer def create_backup_client(self, **kwargs): @@ -78,7 +79,7 @@ def create_backup_client(self, **kwargs): class KeyVaultBackupClientSasPreparer(BaseClientPreparer): def __init__(self, **kwargs) -> None: - super().__init__(**kwargs) + super().__init__(**kwargs) def __call__(self, fn): def _preparer(test_class, api_version, **kwargs): @@ -90,6 +91,7 @@ def _preparer(test_class, api_version, **kwargs): with client: fn(test_class, client, **kwargs) + return _preparer def create_backup_client(self, **kwargs): @@ -112,6 +114,7 @@ def _preparer(test_class, api_version, **kwargs): with client: fn(test_class, client, **kwargs) + return _preparer def create_access_control_client(self, **kwargs): @@ -134,6 +137,7 @@ def _preparer(test_class, api_version, **kwargs): with client: fn(test_class, client, **kwargs) + return _preparer def create_settings_client(self, **kwargs): diff --git a/sdk/keyvault/azure-keyvault-administration/tests/conftest.py b/sdk/keyvault/azure-keyvault-administration/tests/conftest.py index 66f14f669a46..872fdb9d38f7 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/conftest.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/conftest.py @@ -16,7 +16,8 @@ remove_batch_sanitizers, ) -os.environ['PYTHONHASHSEED'] = '0' +os.environ["PYTHONHASHSEED"] = "0" + @pytest.fixture(scope="session", 
autouse=True) def add_sanitizers(test_proxy): @@ -24,22 +25,22 @@ def add_sanitizers(test_proxy): azure_keyvault_url = azure_keyvault_url.rstrip("/") keyvault_tenant_id = os.getenv("KEYVAULT_TENANT_ID", "keyvault_tenant_id") keyvault_subscription_id = os.getenv("KEYVAULT_SUBSCRIPTION_ID", "keyvault_subscription_id") - azure_managedhsm_url = os.environ.get("AZURE_MANAGEDHSM_URL","https://Sanitized.managedhsm.azure.net") + azure_managedhsm_url = os.environ.get("AZURE_MANAGEDHSM_URL", "https://Sanitized.managedhsm.azure.net") azure_managedhsm_url = azure_managedhsm_url.rstrip("/") - azure_attestation_uri = os.environ.get("AZURE_KEYVAULT_ATTESTATION_URL","https://Sanitized.azurewebsites.net") - azure_attestation_uri = azure_attestation_uri.rstrip('/') + azure_attestation_uri = os.environ.get("AZURE_KEYVAULT_ATTESTATION_URL", "https://Sanitized.azurewebsites.net") + azure_attestation_uri = azure_attestation_uri.rstrip("/") storage_url = os.environ.get("BLOB_STORAGE_URL", "https://Sanitized.blob.core.windows.net") client_id = os.environ.get("KEYVAULT_CLIENT_ID", "service-principal-id") - sas_token = os.environ.get("BLOB_STORAGE_SAS_TOKEN","fake-sas") + sas_token = os.environ.get("BLOB_STORAGE_SAS_TOKEN", "fake-sas") add_general_string_sanitizer(target=azure_keyvault_url, value="https://Sanitized.vault.azure.net") add_general_string_sanitizer(target=keyvault_tenant_id, value="00000000-0000-0000-0000-000000000000") add_general_string_sanitizer(target=keyvault_subscription_id, value="00000000-0000-0000-0000-000000000000") - add_general_string_sanitizer(target=azure_managedhsm_url,value="https://Sanitized.managedhsm.azure.net") - add_general_string_sanitizer(target=azure_attestation_uri,value="https://Sanitized.azurewebsites.net") + add_general_string_sanitizer(target=azure_managedhsm_url, value="https://Sanitized.managedhsm.azure.net") + add_general_string_sanitizer(target=azure_attestation_uri, value="https://Sanitized.azurewebsites.net") 
add_general_string_sanitizer(target=storage_url, value="https://Sanitized.blob.core.windows.net") add_general_string_sanitizer(target=sas_token, value="fake-sas") - add_general_string_sanitizer(target=client_id, value = "service-principal-id") + add_general_string_sanitizer(target=client_id, value="service-principal-id") # Sanitize API versions of `azure-keyvault-keys` requests add_uri_regex_sanitizer( regex="keys/([^/]*)/create\\?api-version=(\\S*)", value="keys/$1/create?api-version=sanitized" diff --git a/sdk/keyvault/azure-keyvault-administration/tests/perfstress_tests/get_role_definition.py b/sdk/keyvault/azure-keyvault-administration/tests/perfstress_tests/get_role_definition.py index 80730f6ae7d8..dd5dfb5a2add 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/perfstress_tests/get_role_definition.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/perfstress_tests/get_role_definition.py @@ -8,10 +8,10 @@ from azure.identity import DefaultAzureCredential from azure.identity.aio import DefaultAzureCredential as AsyncDefaultAzureCredential from azure.keyvault.administration import ( - KeyVaultAccessControlClient, + KeyVaultAccessControlClient, KeyVaultDataAction, KeyVaultPermission, - KeyVaultRoleScope, + KeyVaultRoleScope, ) from azure.keyvault.administration.aio import KeyVaultAccessControlClient as AsyncKeyVaultAccessControlClient @@ -32,7 +32,7 @@ def __init__(self, arguments): self.role_name = uuid.uuid4() self.scope = KeyVaultRoleScope.GLOBAL self.permissions = [KeyVaultPermission(data_actions=[KeyVaultDataAction.CREATE_HSM_KEY])] - + async def global_setup(self): """The global setup is run only once.""" await super().global_setup() @@ -42,7 +42,7 @@ async def global_cleanup(self): """The global cleanup is run only once.""" await self.async_client.delete_role_definition(scope=self.scope, name=self.role_name) await super().global_cleanup() - + async def close(self): """This is run after cleanup.""" await self.async_client.close() diff 
--git a/sdk/keyvault/azure-keyvault-administration/tests/test_access_control.py b/sdk/keyvault/azure-keyvault-administration/tests/test_access_control.py index eae2f81bb7ab..55c4d4619a9c 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/test_access_control.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/test_access_control.py @@ -47,11 +47,7 @@ def test_role_definitions(self, client, **kwargs): permissions = [KeyVaultPermission(data_actions=[KeyVaultDataAction.READ_HSM_KEY])] created_definition = client.set_role_definition( - scope=scope, - name=definition_name, - role_name=role_name, - description="test", - permissions=permissions + scope=scope, name=definition_name, role_name=role_name, description="test", permissions=permissions ) assert "/" in created_definition.assignable_scopes assert created_definition.role_name == role_name @@ -61,9 +57,7 @@ def test_role_definitions(self, client, **kwargs): assert created_definition.permissions[0].data_actions == [KeyVaultDataAction.READ_HSM_KEY] assert created_definition.assignable_scopes == [KeyVaultRoleScope.GLOBAL] # update custom role definition - permissions = [ - KeyVaultPermission(data_actions=[], not_data_actions=[KeyVaultDataAction.READ_HSM_KEY]) - ] + permissions = [KeyVaultPermission(data_actions=[], not_data_actions=[KeyVaultDataAction.READ_HSM_KEY])] role_name2 = self.get_resource_name("role-name2") updated_definition = client.set_role_definition( scope=scope, name=definition_name, role_name=role_name2, permissions=permissions @@ -106,14 +100,14 @@ def test_role_assignment(self, client, **kwargs): created = client.create_role_assignment(scope, definition.id, principal_id, name=name) assert created.name == name - #assert created.properties.principal_id == principal_id + # assert created.properties.principal_id == principal_id assert created.properties.role_definition_id == definition.id assert created.properties.scope == scope # should be able to get the new assignment got = 
client.get_role_assignment(scope, name) assert got.name == name - #assert got.properties.principal_id == principal_id + # assert got.properties.principal_id == principal_id assert got.properties.role_definition_id == definition.id assert got.properties.scope == scope diff --git a/sdk/keyvault/azure-keyvault-administration/tests/test_access_control_async.py b/sdk/keyvault/azure-keyvault-administration/tests/test_access_control_async.py index cb7ae8428a68..3aa89f8d793e 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/test_access_control_async.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/test_access_control_async.py @@ -7,7 +7,7 @@ import uuid import pytest -from azure.keyvault.administration import KeyVaultDataAction, KeyVaultPermission,KeyVaultRoleScope +from azure.keyvault.administration import KeyVaultDataAction, KeyVaultPermission, KeyVaultRoleScope from devtools_testutils import add_general_regex_sanitizer, set_bodiless_matcher from devtools_testutils.aio import recorded_by_proxy_async @@ -52,11 +52,7 @@ async def test_role_definitions(self, client, **kwargs): add_general_regex_sanitizer(function_scoped=True, regex=definition_name, value="definition-name") permissions = [KeyVaultPermission(data_actions=[KeyVaultDataAction.READ_HSM_KEY])] created_definition = await client.set_role_definition( - scope=scope, - name=definition_name, - role_name=role_name, - description="test", - permissions=permissions + scope=scope, name=definition_name, role_name=role_name, description="test", permissions=permissions ) assert "/" in created_definition.assignable_scopes assert created_definition.role_name == role_name @@ -67,9 +63,7 @@ async def test_role_definitions(self, client, **kwargs): assert created_definition.assignable_scopes == [KeyVaultRoleScope.GLOBAL] # update custom role definition - permissions = [ - KeyVaultPermission(data_actions=[], not_data_actions=[KeyVaultDataAction.READ_HSM_KEY]) - ] + permissions = 
[KeyVaultPermission(data_actions=[], not_data_actions=[KeyVaultDataAction.READ_HSM_KEY])] role_name2 = self.get_resource_name("role-name2") updated_definition = await client.set_role_definition( scope=scope, name=definition_name, role_name=role_name2, permissions=permissions @@ -96,11 +90,10 @@ async def test_role_definitions(self, client, **kwargs): await client.delete_role_definition(scope, definition_name) async for d in client.list_role_definitions(scope): - assert (d.id != definition.id), "the role definition should have been deleted" + assert d.id != definition.id, "the role definition should have been deleted" if self.is_live: await asyncio.sleep(60) # additional waiting to avoid conflicts with resources in other tests - @pytest.mark.asyncio @pytest.mark.parametrize("api_version", all_api_versions) @KeyVaultAccessControlClientPreparer() @@ -120,14 +113,14 @@ async def test_role_assignment(self, client, **kwargs): created = await client.create_role_assignment(scope, definition.id, principal_id, name=name) assert created.name == name - #assert created.properties.principal_id == principal_id + # assert created.properties.principal_id == principal_id assert created.properties.role_definition_id == definition.id assert created.properties.scope == scope # should be able to get the new assignment got = await client.get_role_assignment(scope, name) assert got.name == name - #assert got.properties.principal_id == principal_id + # assert got.properties.principal_id == principal_id assert got.properties.role_definition_id == definition.id assert got.properties.scope == scope diff --git a/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client.py b/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client.py index d5bd21ee0dfa..6f8f20155dc6 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client.py @@ -23,8 +23,9 @@ class 
TestBackupClientTests(KeyVaultTestCase): def create_key_client(self, vault_uri, **kwargs): from azure.keyvault.keys import KeyClient + credential = self.get_credential(KeyClient) - return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs ) + return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs) @pytest.mark.parametrize("api_version", only_default) @KeyVaultBackupClientPreparer() @@ -99,7 +100,6 @@ def test_selective_key_restore(self, client, **kwargs): key_name = self.get_resource_name("selective-restore-test-key") key_client.create_rsa_key(key_name) - # backup the vault container_uri = kwargs.pop("container_uri") backup_poller = client.begin_backup(container_uri, use_managed_identity=True) @@ -188,7 +188,9 @@ def test_backup_restore_sas(self, client: KeyVaultBackupClient, **kwargs): sas_token = kwargs.pop("sas_token") if self.is_live and not sas_token: - pytest.skip("SAS token is required for live tests. Please set the BLOB_STORAGE_SAS_TOKEN environment variable.") + pytest.skip( + "SAS token is required for live tests. Please set the BLOB_STORAGE_SAS_TOKEN environment variable." 
+ ) client.begin_pre_backup(container_uri, sas_token=sas_token).wait() backup_poller = client.begin_backup(container_uri, sas_token) # Test positional SAS token for backwards compat diff --git a/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client_async.py b/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client_async.py index 7f6e6084500a..c35dd17a3863 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client_async.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client_async.py @@ -20,9 +20,10 @@ class TestBackupClientTests(KeyVaultTestCase): def create_key_client(self, vault_uri, **kwargs): - from azure.keyvault.keys.aio import KeyClient - credential = self.get_credential(KeyClient, is_async=True) - return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs ) + from azure.keyvault.keys.aio import KeyClient + + credential = self.get_credential(KeyClient, is_async=True) + return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs) @pytest.mark.asyncio @pytest.mark.parametrize("api_version", only_default) @@ -139,7 +140,7 @@ async def test_backup_client_polling(self, client, **kwargs): if self.is_live: assert backup_poller.status() == "InProgress" assert not backup_poller.done() or backup_poller.polling_method().finished() - #assert rehydrated.status() == "InProgress" + # assert rehydrated.status() == "InProgress" assert not rehydrated.done() or rehydrated.polling_method().finished() backup_operation = await backup_poller.result() @@ -169,7 +170,7 @@ async def test_backup_client_polling(self, client, **kwargs): if self.is_live: assert restore_poller.status() == "InProgress" assert not restore_poller.done() or restore_poller.polling_method().finished() - #assert rehydrated.status() == "InProgress" + # assert rehydrated.status() == "InProgress" assert not rehydrated.done() or 
rehydrated.polling_method().finished() await rehydrated.wait() @@ -190,7 +191,9 @@ async def test_backup_restore_sas(self, client, **kwargs): sas_token = kwargs.pop("sas_token") if self.is_live and not sas_token: - pytest.skip("SAS token is required for live tests. Please set the BLOB_STORAGE_SAS_TOKEN environment variable.") + pytest.skip( + "SAS token is required for live tests. Please set the BLOB_STORAGE_SAS_TOKEN environment variable." + ) check_poller = await client.begin_pre_backup(container_uri, sas_token=sas_token) await check_poller.wait() diff --git a/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration.py b/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration.py index ddcb3d8876d1..4bb463ad28be 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration.py @@ -18,8 +18,9 @@ class TestExamplesTests(KeyVaultTestCase): def create_key_client(self, vault_uri, **kwargs): from azure.keyvault.keys import KeyClient + credential = self.get_credential(KeyClient) - return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs ) + return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs) @pytest.mark.parametrize("api_version", only_default) @KeyVaultBackupClientPreparer() @@ -63,7 +64,7 @@ def test_example_backup_and_restore(self, client, **kwargs): @pytest.mark.parametrize("api_version", only_default) @KeyVaultBackupClientPreparer() @recorded_by_proxy - def test_example_selective_key_restore(self, client,**kwargs): + def test_example_selective_key_restore(self, client, **kwargs): set_bodiless_matcher() # create a key to selectively restore managed_hsm_url = kwargs.pop("managed_hsm_url") diff --git a/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration_async.py 
b/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration_async.py index 1268425b5ea6..c601e60c38fa 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration_async.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration_async.py @@ -20,8 +20,9 @@ class TestExamplesTests(KeyVaultTestCase): def create_key_client(self, vault_uri, **kwargs): from azure.keyvault.keys.aio import KeyClient + credential = self.get_credential(KeyClient, is_async=True) - return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs ) + return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs) @pytest.mark.asyncio @pytest.mark.parametrize("api_version", only_default) From 22cb183da37551959eccbe53947e7af451cf224c Mon Sep 17 00:00:00 2001 From: Nicola Camillucci Date: Tue, 28 Apr 2026 17:25:24 +0100 Subject: [PATCH 04/11] Added back manifest and pyproject changes --- .../azure-keyvault-administration/MANIFEST.in | 3 +- .../azure-keyvault-administration/setup.py | 14 +++-- sdk/keyvault/azure-keyvault-keys/MANIFEST.in | 3 +- .../azure-keyvault-keys/pyproject.toml | 59 ++++++++++--------- 4 files changed, 43 insertions(+), 36 deletions(-) diff --git a/sdk/keyvault/azure-keyvault-administration/MANIFEST.in b/sdk/keyvault/azure-keyvault-administration/MANIFEST.in index 903a5953d673..0cc8058bae0b 100644 --- a/sdk/keyvault/azure-keyvault-administration/MANIFEST.in +++ b/sdk/keyvault/azure-keyvault-administration/MANIFEST.in @@ -1,7 +1,8 @@ include *.md include LICENSE -include azure/keyvault/administration/py.typed +include azure/keyvault/administration/_generated/py.typed recursive-include tests *.py recursive-include samples *.py *.md include azure/__init__.py include azure/keyvault/__init__.py +include azure/keyvault/administration/__init__.py diff --git a/sdk/keyvault/azure-keyvault-administration/setup.py 
b/sdk/keyvault/azure-keyvault-administration/setup.py index 0edb70640803..5bdb286c4be9 100644 --- a/sdk/keyvault/azure-keyvault-administration/setup.py +++ b/sdk/keyvault/azure-keyvault-administration/setup.py @@ -14,9 +14,10 @@ PACKAGE_NAME = "azure-keyvault-administration" PACKAGE_PPRINT_NAME = "Key Vault Administration" +PACKAGE_NAMESPACE = "azure.keyvault.administration._generated" -# a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace("-", "/") +# a.b.c => a/b/c +package_folder_path = PACKAGE_NAMESPACE.replace(".", "/") # Version extraction inspired from 'requests' with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: @@ -29,7 +30,6 @@ setup( name=PACKAGE_NAME, version=version, - include_package_data=True, description="Microsoft Corporation {} Client Library for Python".format(PACKAGE_PPRINT_NAME), long_description=open("README.md", "r").read(), long_description_content_type="text/markdown", @@ -53,16 +53,20 @@ zip_safe=False, packages=find_packages( exclude=[ - "samples", "tests", # Exclude packages that will be covered by PEP420 or nspkg "azure", "azure.keyvault", + "azure.keyvault.administration", ] ), + include_package_data=True, + package_data={ + "azure.keyvault.administration._generated": ["py.typed"], + }, install_requires=[ "isodate>=0.6.1", - "azure-core>=1.38.0", + "azure-core>=1.37.0", "typing-extensions>=4.6.0", ], python_requires=">=3.9", diff --git a/sdk/keyvault/azure-keyvault-keys/MANIFEST.in b/sdk/keyvault/azure-keyvault-keys/MANIFEST.in index d4688a08c24e..7696bd6b2f38 100644 --- a/sdk/keyvault/azure-keyvault-keys/MANIFEST.in +++ b/sdk/keyvault/azure-keyvault-keys/MANIFEST.in @@ -1,7 +1,8 @@ include *.md include LICENSE -include azure/keyvault/keys/py.typed +include azure/keyvault/keys/_generated/py.typed recursive-include tests *.py recursive-include samples *.py *.md include azure/__init__.py include azure/keyvault/__init__.py +include azure/keyvault/keys/__init__.py diff --git 
a/sdk/keyvault/azure-keyvault-keys/pyproject.toml b/sdk/keyvault/azure-keyvault-keys/pyproject.toml index 7caa00c7dd7c..526d60282e9f 100644 --- a/sdk/keyvault/azure-keyvault-keys/pyproject.toml +++ b/sdk/keyvault/azure-keyvault-keys/pyproject.toml @@ -1,16 +1,21 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + [build-system] -requires = ["setuptools>=61.0.0", "wheel"] # Requires 61.0.0 for dynamic version +requires = ["setuptools>=77.0.3", "wheel"] build-backend = "setuptools.build_meta" [project] name = "azure-keyvault-keys" authors = [ - {name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com"}, + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, ] description = "Microsoft Corporation Azure Key Vault Keys Client Library for Python" -keywords = ["azure", "azure sdk"] -requires-python = ">=3.9" -license = {text = "MIT License"} +license = "MIT" classifiers = [ "Development Status :: 5 - Production/Stable", "Programming Language :: Python", @@ -21,25 +26,38 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", - "License :: OSI Approved :: MIT License", ] +requires-python = ">=3.9" +keywords = ["azure", "azure sdk"] + dependencies = [ - "azure-core>=1.31.0", - "cryptography>=2.1.4", "isodate>=0.6.1", + "azure-core>=1.37.0", "typing-extensions>=4.6.0", + "cryptography>=2.1.4", +] +dynamic = [ +"version", "readme" ] -dynamic = ["version", "readme"] [project.urls] -repository = "https://github.com/Azure/azure-sdk-for-python/tree/main/sdk" 
+repository = "https://github.com/Azure/azure-sdk-for-python" [tool.setuptools.dynamic] -version = {attr = "azure.keyvault.keys._version.VERSION"} +version = {attr = "azure.keyvault.keys._generated._version.VERSION"} readme = {file = ["README.md", "CHANGELOG.md"], content-type = "text/markdown"} [tool.setuptools.packages.find] -exclude = ["samples*", "tests*", "azure", "azure.keyvault"] +exclude = [ + "tests*", + "generated_tests*", + "samples*", + "generated_samples*", + "doc*", + "azure", + "azure.keyvault", + "azure.keyvault.keys", +] [tool.setuptools.package-data] pytyped = ["py.typed"] @@ -47,23 +65,6 @@ pytyped = ["py.typed"] [tool.azure-sdk-build] pyright = false -[tool.uv.sources] -azure-core = { path = "../../core/azure-core" } -azure-keyvault-nspkg = { path = "../../nspkg/azure-keyvault-nspkg" } -azure-sdk-tools = { path = "../../../eng/tools/azure-sdk-tools" } - -[dependency-groups] -dev = [ - "aiohttp>=3.0", - "azure-core", - "azure-identity>=1.24.0", - "azure-keyvault-nspkg", - "azure-mgmt-keyvault==10.1.0", - "azure-sdk-tools", - "parameterized>=0.7.3", - "python-dateutil>=2.8.0", -] - [tool.azure-sdk-conda] in_bundle = true bundle_name = "azure-keyvault" From 02bd0ba7af866b8d5eaed8b89f003ad4f69e000c Mon Sep 17 00:00:00 2001 From: Nicola Camillucci Date: Wed, 29 Apr 2026 16:15:29 +0100 Subject: [PATCH 05/11] Updated default API version --- sdk/keyvault/azure-keyvault-administration/CHANGELOG.md | 2 ++ .../azure/keyvault/administration/_internal/client_base.py | 3 ++- sdk/keyvault/azure-keyvault-keys/CHANGELOG.md | 2 ++ .../azure/keyvault/keys/_shared/client_base.py | 3 ++- 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/sdk/keyvault/azure-keyvault-administration/CHANGELOG.md b/sdk/keyvault/azure-keyvault-administration/CHANGELOG.md index e35340edc081..bed9ac1a91ad 100644 --- a/sdk/keyvault/azure-keyvault-administration/CHANGELOG.md +++ b/sdk/keyvault/azure-keyvault-administration/CHANGELOG.md @@ -4,6 +4,8 @@ ### Features Added +- Added 
support for service API version `2025-07-01` + ### Breaking Changes - Changed the continuation token format. Continuation tokens generated by previous versions of diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/client_base.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/client_base.py index 77401fd15ba6..0ed49a8c55b3 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/client_base.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/client_base.py @@ -24,6 +24,7 @@ class ApiVersion(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Key Vault API versions supported by this package""" #: this is the default version + V2025_07_01 = "2025-07-01" V7_6 = "7.6" V7_5 = "7.5" V7_4 = "7.4" @@ -31,7 +32,7 @@ class ApiVersion(str, Enum, metaclass=CaseInsensitiveEnumMeta): V7_2 = "7.2" -DEFAULT_VERSION = ApiVersion.V7_6 +DEFAULT_VERSION = ApiVersion.V2025_07_01 _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False diff --git a/sdk/keyvault/azure-keyvault-keys/CHANGELOG.md b/sdk/keyvault/azure-keyvault-keys/CHANGELOG.md index a48cfd310419..ccefa5cca550 100644 --- a/sdk/keyvault/azure-keyvault-keys/CHANGELOG.md +++ b/sdk/keyvault/azure-keyvault-keys/CHANGELOG.md @@ -4,6 +4,8 @@ ### Features Added +- Added support for service API version `2025-07-01` + ### Breaking Changes ### Bugs Fixed diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_shared/client_base.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_shared/client_base.py index ff5d529d119f..9e721ef6cfe2 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_shared/client_base.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_shared/client_base.py @@ -24,6 +24,7 @@ class ApiVersion(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Key Vault API versions supported by this package""" #: this is 
the default version + V2025_07_01 = "2025-07-01" V7_6 = "7.6" V7_5 = "7.5" V7_4 = "7.4" @@ -34,7 +35,7 @@ class ApiVersion(str, Enum, metaclass=CaseInsensitiveEnumMeta): V2016_10_01 = "2016-10-01" -DEFAULT_VERSION = ApiVersion.V7_6 +DEFAULT_VERSION = ApiVersion.V2025_07_01 _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False From 7fac714c1ffd21177df960a7c15fd9121454796d Mon Sep 17 00:00:00 2001 From: Nicola Camillucci Date: Tue, 28 Apr 2026 17:28:47 +0100 Subject: [PATCH 06/11] Updated tests --- .../azure-keyvault-administration/assets.json | 2 +- sdk/keyvault/azure-keyvault-keys/assets.json | 2 +- sdk/keyvault/azure-keyvault-keys/tests/conftest.py | 8 ++++++-- .../tests/test_challenge_auth_async.py | 12 +++++++++++- 4 files changed, 19 insertions(+), 5 deletions(-) diff --git a/sdk/keyvault/azure-keyvault-administration/assets.json b/sdk/keyvault/azure-keyvault-administration/assets.json index 397dfc697ae6..36e1355ab6ab 100644 --- a/sdk/keyvault/azure-keyvault-administration/assets.json +++ b/sdk/keyvault/azure-keyvault-administration/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/keyvault/azure-keyvault-administration", - "Tag": "python/keyvault/azure-keyvault-administration_ab4ab43926" + "Tag": "python/keyvault/azure-keyvault-administration_007a803c2c" } diff --git a/sdk/keyvault/azure-keyvault-keys/assets.json b/sdk/keyvault/azure-keyvault-keys/assets.json index df42837dafed..ad02b6cc8fcc 100644 --- a/sdk/keyvault/azure-keyvault-keys/assets.json +++ b/sdk/keyvault/azure-keyvault-keys/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/keyvault/azure-keyvault-keys", - "Tag": "python/keyvault/azure-keyvault-keys_7741e21de3" + "Tag": "python/keyvault/azure-keyvault-keys_229759aadf" } diff --git a/sdk/keyvault/azure-keyvault-keys/tests/conftest.py b/sdk/keyvault/azure-keyvault-keys/tests/conftest.py 
index 8e6ecf471467..bdd0c0faf09d 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/conftest.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/conftest.py @@ -56,7 +56,7 @@ def add_sanitizers(test_proxy): add_general_string_sanitizer(target=azure_managedhsm_url, value="https://managedhsmvaultname.managedhsm.azure.net") add_general_string_sanitizer(target=azure_attestation_uri, value="https://fakeattestation.azurewebsites.net") add_oauth_response_sanitizer() - set_custom_default_matcher(compare_bodies=False, ignore_query_ordering=True) + set_custom_default_matcher(compare_bodies=False, ignore_query_ordering=True, ignored_headers="Accept") # Remove the following sanitizers since certain fields are needed in tests and are non-sensitive: # - AZSDK3430: $..id @@ -97,6 +97,10 @@ def immediate_return(_): @pytest.fixture(scope="session") def event_loop(request): - loop = asyncio.new_event_loop() + try: + loop = asyncio.get_event_loop() + except RuntimeError: + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) yield loop loop.close() diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth_async.py b/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth_async.py index 76ccf739df16..ab1e6af8c3d6 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth_async.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth_async.py @@ -7,6 +7,7 @@ the challenge cache is global to the process. 
""" import asyncio +import functools from itertools import product import os import time @@ -30,7 +31,6 @@ from _shared.helpers_async import async_validating_transport from _shared.test_case_async import KeyVaultTestCase from test_challenge_auth import ( - empty_challenge_cache, get_random_url, add_url_port, CAE_CHALLENGE_RESPONSE, @@ -82,6 +82,16 @@ async def test_multitenant_authentication(self, client, is_hsm, **kwargs): os.environ.pop("AZURE_TENANT_ID") +def empty_challenge_cache(fn): + @functools.wraps(fn) + async def wrapper(**kwargs): + HttpChallengeCache.clear() + assert len(HttpChallengeCache._cache) == 0 + return await fn(**kwargs) + + return wrapper + + @pytest.mark.asyncio @empty_challenge_cache async def test_enforces_tls(): From 23d0b0be12b3f93ab4a6c5098a4a2d0e83a17d1a Mon Sep 17 00:00:00 2001 From: Nicola Camillucci Date: Fri, 1 May 2026 15:34:08 +0100 Subject: [PATCH 07/11] Reverted autogenerated changes --- sdk/keyvault/azure-keyvault-administration/setup.py | 6 +++--- sdk/keyvault/azure-keyvault-keys/pyproject.toml | 3 +-- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/sdk/keyvault/azure-keyvault-administration/setup.py b/sdk/keyvault/azure-keyvault-administration/setup.py index 5bdb286c4be9..064846aa7331 100644 --- a/sdk/keyvault/azure-keyvault-administration/setup.py +++ b/sdk/keyvault/azure-keyvault-administration/setup.py @@ -14,7 +14,7 @@ PACKAGE_NAME = "azure-keyvault-administration" PACKAGE_PPRINT_NAME = "Key Vault Administration" -PACKAGE_NAMESPACE = "azure.keyvault.administration._generated" +PACKAGE_NAMESPACE = "azure.keyvault.administration" # a.b.c => a/b/c package_folder_path = PACKAGE_NAMESPACE.replace(".", "/") @@ -53,11 +53,11 @@ zip_safe=False, packages=find_packages( exclude=[ + "samples", "tests", # Exclude packages that will be covered by PEP420 or nspkg "azure", "azure.keyvault", - "azure.keyvault.administration", ] ), include_package_data=True, @@ -66,7 +66,7 @@ }, install_requires=[ "isodate>=0.6.1", - 
"azure-core>=1.37.0", + "azure-core>=1.38.0", "typing-extensions>=4.6.0", ], python_requires=">=3.9", diff --git a/sdk/keyvault/azure-keyvault-keys/pyproject.toml b/sdk/keyvault/azure-keyvault-keys/pyproject.toml index 526d60282e9f..eba6df9c8680 100644 --- a/sdk/keyvault/azure-keyvault-keys/pyproject.toml +++ b/sdk/keyvault/azure-keyvault-keys/pyproject.toml @@ -44,7 +44,7 @@ dynamic = [ repository = "https://github.com/Azure/azure-sdk-for-python" [tool.setuptools.dynamic] -version = {attr = "azure.keyvault.keys._generated._version.VERSION"} +version = {attr = "azure.keyvault.keys._version.VERSION"} readme = {file = ["README.md", "CHANGELOG.md"], content-type = "text/markdown"} [tool.setuptools.packages.find] @@ -56,7 +56,6 @@ exclude = [ "doc*", "azure", "azure.keyvault", - "azure.keyvault.keys", ] [tool.setuptools.package-data] From af7dd3cb2bcbd247db1180cf139834a64b864e75 Mon Sep 17 00:00:00 2001 From: Nicola Camillucci Date: Mon, 4 May 2026 17:53:09 +0100 Subject: [PATCH 08/11] MyPy, PyLint, Snippets --- .../azure-keyvault-administration/README.md | 3 +-- .../azure/keyvault/keys/_client.py | 4 ++-- .../azure/keyvault/keys/_models.py | 9 ++++----- .../azure/keyvault/keys/crypto/_models.py | 15 +++++++++------ 4 files changed, 16 insertions(+), 15 deletions(-) diff --git a/sdk/keyvault/azure-keyvault-administration/README.md b/sdk/keyvault/azure-keyvault-administration/README.md index aa8866bf5f9b..d63f617a3cc4 100644 --- a/sdk/keyvault/azure-keyvault-administration/README.md +++ b/sdk/keyvault/azure-keyvault-administration/README.md @@ -193,8 +193,7 @@ role_definition = client.set_role_definition(scope=scope, role_name=role_name, p ```python new_permissions = [ KeyVaultPermission( - data_actions=[KeyVaultDataAction.READ_HSM_KEY], - not_data_actions=[KeyVaultDataAction.CREATE_HSM_KEY] + data_actions=[KeyVaultDataAction.READ_HSM_KEY], not_data_actions=[KeyVaultDataAction.CREATE_HSM_KEY] ) ] unique_definition_name = role_definition.name diff --git 
a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_client.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_client.py index d54c2457648e..b1c2eb3a4b93 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_client.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_client.py @@ -399,9 +399,9 @@ def create_oct_key( ) @distributed_trace - def begin_delete_key( + def begin_delete_key( # pylint:disable=bad-option-value,delete-operation-wrong-return-type self, name: str, **kwargs: Any - ) -> LROPoller[DeletedKey]: # pylint:disable=bad-option-value,delete-operation-wrong-return-type + ) -> LROPoller[DeletedKey]: """Delete all versions of a key and its cryptographic material. Requires keys/delete permission. When this method returns Key Vault has begun deleting the key. Deletion may diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_models.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_models.py index bff16a7210b9..fb4255c78129 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_models.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_models.py @@ -313,9 +313,8 @@ def attestation(self) -> Optional[KeyAttestation]: # attestation was added in 7.6-preview.2 if self._attributes: attestation = getattr(self._attributes, "attestation", None) - return ( - KeyAttestation._from_generated(attestation=attestation) if attestation else None - ) # pylint:disable=protected-access + if attestation: + return KeyAttestation._from_generated(attestation=attestation) # pylint:disable=protected-access return None @@ -414,8 +413,8 @@ def _from_generated(cls, policy: "_models.KeyRotationPolicy") -> "KeyRotationPol [] if policy.lifetime_actions is None else [ - KeyRotationLifetimeAction._from_generated(action) - for action in policy.lifetime_actions # pylint:disable=protected-access + KeyRotationLifetimeAction._from_generated(action) # pylint:disable=protected-access + for action in policy.lifetime_actions 
] ) if policy.attributes: diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py index b39a8ca064c4..ac214bbb262e 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py @@ -240,8 +240,11 @@ def verify( if not result.is_valid: raise InvalidSignature(f"The provided signature '{signature!r}' is invalid.") - def recover_data_from_signature( - self, signature: bytes, padding: AsymmetricPadding, algorithm: Optional[HashAlgorithm] + def recover_data_from_signature( # type: ignore[override] # Parameter subset + self, + signature: bytes, + padding: AsymmetricPadding, + algorithm: Optional[HashAlgorithm], ) -> bytes: # pylint: disable=line-too-long """Recovers the signed data from the signature. Only supported with `cryptography` version 3.3 and above. @@ -327,7 +330,7 @@ def __deepcopy__(self, memo: dict) -> KeyVaultRSAPublicKey: """ return self - def verifier(# pylint:disable=docstring-missing-param,docstring-missing-return,docstring-missing-rtype + def verifier( # pylint:disable=docstring-missing-param,docstring-missing-return,docstring-missing-rtype self, signature: bytes, padding: AsymmetricPadding, algorithm: HashAlgorithm ) -> NoReturn: """Not implemented. This method was deprecated in `cryptography` 2.0 and removed in 37.0.0.""" @@ -391,7 +394,7 @@ def public_key(self) -> KeyVaultRSAPublicKey: """ return KeyVaultRSAPublicKey(self._client, self._key) - def sign( + def sign( # type: ignore[override] # Parameter subset self, data: bytes, padding: AsymmetricPadding, @@ -412,8 +415,8 @@ def sign( :returns: The signature, as bytes. :rtype: bytes """ - if isinstance(algorithm, Prehashed): - raise ValueError("`Prehashed` algorithms are unsupported. 
Please provide a `HashAlgorithm` instead.") + if not isinstance(algorithm, HashAlgorithm): + raise ValueError("Only `HashAlgorithm`s are supported. Please provide a `HashAlgorithm` instead.") mapped_algorithm = get_signature_algorithm(padding, algorithm) digest = Hash(algorithm) digest.update(data) From 6178eb15270e494856dadc2d699efe6b3c38a7a0 Mon Sep 17 00:00:00 2001 From: Nicola Camillucci Date: Fri, 1 May 2026 15:34:15 +0100 Subject: [PATCH 09/11] Updated changelog --- sdk/keyvault/azure-keyvault-administration/CHANGELOG.md | 2 +- sdk/keyvault/azure-keyvault-keys/CHANGELOG.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/keyvault/azure-keyvault-administration/CHANGELOG.md b/sdk/keyvault/azure-keyvault-administration/CHANGELOG.md index bed9ac1a91ad..85633b393451 100644 --- a/sdk/keyvault/azure-keyvault-administration/CHANGELOG.md +++ b/sdk/keyvault/azure-keyvault-administration/CHANGELOG.md @@ -4,7 +4,7 @@ ### Features Added -- Added support for service API version `2025-07-01` +- Added support for service API version `2025-07-01` [#46716](https://github.com/Azure/azure-sdk-for-python/pull/46716) ### Breaking Changes diff --git a/sdk/keyvault/azure-keyvault-keys/CHANGELOG.md b/sdk/keyvault/azure-keyvault-keys/CHANGELOG.md index ccefa5cca550..eaf17ab9eed7 100644 --- a/sdk/keyvault/azure-keyvault-keys/CHANGELOG.md +++ b/sdk/keyvault/azure-keyvault-keys/CHANGELOG.md @@ -4,7 +4,7 @@ ### Features Added -- Added support for service API version `2025-07-01` +- Added support for service API version `2025-07-01` [#46716](https://github.com/Azure/azure-sdk-for-python/pull/46716) ### Breaking Changes From 4e4b0a81dd60f5d6171432227796e2e517aa65e0 Mon Sep 17 00:00:00 2001 From: Nicola Camillucci Date: Tue, 5 May 2026 13:38:14 +0100 Subject: [PATCH 10/11] Applied copilot recommendations --- sdk/keyvault/azure-keyvault-administration/CHANGELOG.md | 1 + sdk/keyvault/azure-keyvault-administration/MANIFEST.in | 2 +- 
sdk/keyvault/azure-keyvault-administration/setup.py | 2 +- sdk/keyvault/azure-keyvault-keys/CHANGELOG.md | 2 ++ sdk/keyvault/azure-keyvault-keys/MANIFEST.in | 2 +- 5 files changed, 6 insertions(+), 3 deletions(-) diff --git a/sdk/keyvault/azure-keyvault-administration/CHANGELOG.md b/sdk/keyvault/azure-keyvault-administration/CHANGELOG.md index 85633b393451..908b367c0d05 100644 --- a/sdk/keyvault/azure-keyvault-administration/CHANGELOG.md +++ b/sdk/keyvault/azure-keyvault-administration/CHANGELOG.md @@ -17,6 +17,7 @@ ### Other Changes - Updated minimum `azure-core` version to 1.38.0 +- Key Vault API version `2025-07-01` is now the default ## 4.6.0 (2025-06-16) diff --git a/sdk/keyvault/azure-keyvault-administration/MANIFEST.in b/sdk/keyvault/azure-keyvault-administration/MANIFEST.in index 0cc8058bae0b..8ab5303db079 100644 --- a/sdk/keyvault/azure-keyvault-administration/MANIFEST.in +++ b/sdk/keyvault/azure-keyvault-administration/MANIFEST.in @@ -1,6 +1,6 @@ include *.md include LICENSE -include azure/keyvault/administration/_generated/py.typed +include azure/keyvault/administration/py.typed recursive-include tests *.py recursive-include samples *.py *.md include azure/__init__.py diff --git a/sdk/keyvault/azure-keyvault-administration/setup.py b/sdk/keyvault/azure-keyvault-administration/setup.py index 064846aa7331..4450e0daaa80 100644 --- a/sdk/keyvault/azure-keyvault-administration/setup.py +++ b/sdk/keyvault/azure-keyvault-administration/setup.py @@ -62,7 +62,7 @@ ), include_package_data=True, package_data={ - "azure.keyvault.administration._generated": ["py.typed"], + "azure.keyvault.administration": ["py.typed"], }, install_requires=[ "isodate>=0.6.1", diff --git a/sdk/keyvault/azure-keyvault-keys/CHANGELOG.md b/sdk/keyvault/azure-keyvault-keys/CHANGELOG.md index eaf17ab9eed7..b691c5fe69af 100644 --- a/sdk/keyvault/azure-keyvault-keys/CHANGELOG.md +++ b/sdk/keyvault/azure-keyvault-keys/CHANGELOG.md @@ -12,6 +12,8 @@ ### Other Changes +- Key Vault API version 
`2025-07-01` is now the default + ## 4.11.0 (2025-06-16) ### Features Added diff --git a/sdk/keyvault/azure-keyvault-keys/MANIFEST.in b/sdk/keyvault/azure-keyvault-keys/MANIFEST.in index 7696bd6b2f38..9bd7188bf0b2 100644 --- a/sdk/keyvault/azure-keyvault-keys/MANIFEST.in +++ b/sdk/keyvault/azure-keyvault-keys/MANIFEST.in @@ -1,6 +1,6 @@ include *.md include LICENSE -include azure/keyvault/keys/_generated/py.typed +include azure/keyvault/keys/py.typed recursive-include tests *.py recursive-include samples *.py *.md include azure/__init__.py From 08dadcb113f82b13a5221e74dccda15b65dd097f Mon Sep 17 00:00:00 2001 From: Nicola Camillucci Date: Tue, 5 May 2026 19:58:51 +0100 Subject: [PATCH 11/11] Minor revert --- .../azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py | 4 ++-- sdk/keyvault/azure-keyvault-keys/dev_requirements.txt | 2 +- sdk/keyvault/azure-keyvault-keys/pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py index ac214bbb262e..d65d152f6240 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py @@ -415,8 +415,8 @@ def sign( # type: ignore[override] # Parameter subset :returns: The signature, as bytes. :rtype: bytes """ - if not isinstance(algorithm, HashAlgorithm): - raise ValueError("Only `HashAlgorithm`s are supported. Please provide a `HashAlgorithm` instead.") + if isinstance(algorithm, Prehashed): + raise ValueError("`Prehashed` algorithms are unsupported. 
Please provide a `HashAlgorithm` instead.") mapped_algorithm = get_signature_algorithm(padding, algorithm) digest = Hash(algorithm) digest.update(data) diff --git a/sdk/keyvault/azure-keyvault-keys/dev_requirements.txt b/sdk/keyvault/azure-keyvault-keys/dev_requirements.txt index 2dd2a406a66c..74b3eacbbda9 100644 --- a/sdk/keyvault/azure-keyvault-keys/dev_requirements.txt +++ b/sdk/keyvault/azure-keyvault-keys/dev_requirements.txt @@ -4,6 +4,6 @@ aiohttp>=3.0 azure-identity azure-mgmt-keyvault==10.1.0 -cryptography<47.0.0 +cryptography>=44.0.2 parameterized>=0.7.3 python-dateutil>=2.8.0 \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-keys/pyproject.toml b/sdk/keyvault/azure-keyvault-keys/pyproject.toml index eba6df9c8680..5da059fdcf7e 100644 --- a/sdk/keyvault/azure-keyvault-keys/pyproject.toml +++ b/sdk/keyvault/azure-keyvault-keys/pyproject.toml @@ -34,7 +34,7 @@ dependencies = [ "isodate>=0.6.1", "azure-core>=1.37.0", "typing-extensions>=4.6.0", - "cryptography>=2.1.4", + "cryptography>=44.0.2", ] dynamic = [ "version", "readme"